Returned data with Node.js and MySQL

When I connect with MySQL, I get this value: { id: 14 }
Actually I want to get only 14.
This is my code:
app.get('/register', function(req, res) {
  var data = {
    "error": 1,
    "result": ""
  };
  console.log("ams");
  connection.query("SELECT id from register", function(err, rows, fields) {
    if (rows.length != 0) {
      data["error"] = 0;
      data["result"] = rows;
      res.json(data);
      console.log(data.result[0]);
    } else {
      data["result"] = 'No data Found..';
      res.json(data);
    }
  });
});

If you're expecting one row only, you could set
data["result"] = rows[0].id;
However, your response is an array of JSON objects no matter how many results you get. It's much better to set up the res.json() receiver to work with objects, not plain strings/numbers.
It is not possible to get only values as a mysql query result in node with node-mysql (and I don't think there's any other library that would do that, because it doesn't make sense).
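For example, a minimal sketch of that single-row case applied to the handler above (the LIMIT 1 is an assumption added here only to make the single-row intent explicit):
connection.query("SELECT id FROM register LIMIT 1", function(err, rows, fields) {
  if (!err && rows.length != 0) {
    data["error"] = 0;
    data["result"] = rows[0].id; // 14 rather than { id: 14 }
  } else {
    data["result"] = 'No data Found..';
  }
  res.json(data);
});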

Related

Node + Mysql: How to execute more queries based on another query

I'm pretty new to the Node world and trying to migrate our PHP application to Node. To be able to return all article data, several different queries have to be run, depending on the results of the first query. Currently my data object is empty, as it's returned before the two queries run. How can I "chain" these queries using a promise-based approach?
I found a library, https://github.com/lukeb-uk/node-promise-mysql, which I think could help, but I have no idea how to implement it with my code.
exports.getArticleData = function(req, done) {
  pool.getConnection(function(error, connection) {
    if (error) throw error;
    var data = {
      article: {},
      listicles: []
    };
    // Initial query
    connection.query(
      `SELECT article_id, title, is_listicle_article FROM com_magazine_articles AS article WHERE article_id = ${req.params.articleId}`,
      function(error, results) {
        data.article = results;
      }
    );
    // This query should only be executed if is_listicle_article = true
    if (data.article.is_listicle_article) {
      connection.query(
        `SELECT * FROM com_magazine_article_listicles WHERE article_id = ${req.params.articleId}`,
        function(error, results) {
          data.listicle = results;
        }
      );
    }
    // More queries depending on the result of the first one
    // ....
    // ....
    // Callback with the data object
    done(data);
    connection.release();
  });
};
What would be the best approach to executing queries based on other queries' results? Any help is really appreciated.
The functionality you are looking for is promise chaining: it allows you to construct a sequence of promises, each depending on the result of the previous one. Applying this to your code, you would get something like this:
exports.getArticleData = function(req, done) {
  pool.getConnection(function(error, connection) {
    if (error) throw error;
    // Initial query
    return connection.query(
      `SELECT article_id, title, is_listicle_article FROM com_magazine_articles AS article WHERE article_id = ${req.params.articleId}`
    ).then((rows) => {
      // This query is only executed if is_listicle_article = true;
      // more queries depending on the result of the first one could be added here
      return Promise.all(rows.map((article) => {
        if (article.is_listicle_article) {
          return connection.query(
            `SELECT * FROM com_magazine_article_listicles WHERE article_id = ${req.params.articleId}`
          );
        } else {
          return Promise.resolve(null);
        }
      }));
    }).then((res) => {
      connection.release();
      // Callback with the data object
      done(res.filter(function(i) { return i != null; }));
    });
  });
};
Obviously since I don't have all of your code, I couldn't verify this example, but this should be roughly the functionality you are looking for. That said, I think there were a couple of mistakes you should watch out for in your example code:
With promise-mysql, connection.query() returns a promise, so it doesn't need a callback function. Use this to your advantage; it will make your code prettier.
connection.query() resolves with an array of rows, not a single value. You seem to have ignored this in your example code.
Try not to save things into a variable when using promises; it isn't necessary. To remedy this, read more about the Promise API (Promise.resolve(), Promise.reject(), Promise.any(), Promise.all(), .catch(), etc.).
It seems like these SQL queries could easily be combined into a single query. This will be far more efficient than performing two operations. Not sure if this is the case with the remaining queries you wish to use, but it is definitely something to look out for.
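If the first two queries can indeed be merged, a single round trip might look roughly like this (a sketch only: it assumes article_id is the join key in com_magazine_article_listicles, and it uses a ? placeholder instead of interpolating req.params.articleId into the SQL):
connection.query(
  `SELECT a.article_id, a.title, a.is_listicle_article, l.*
   FROM com_magazine_articles AS a
   LEFT JOIN com_magazine_article_listicles AS l ON l.article_id = a.article_id
   WHERE a.article_id = ?`,
  [req.params.articleId]
).then((rows) => {
  // one row per listicle entry, or a single row with NULL listicle columns
});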

Using KnexJS to query X number of tables?

I have a unique situation here which I am having trouble solving in an elegant fashion.
A user passes up an array of signals which they want to export data for. This array can be 1 -> Any_Number, so first I go fetch the table names (each signal stores data in a separate table) based on the signals passed, and store those in an object.
The next step is to iterate over that object (which contains the table names I need to query), execute a query per table, and store the results in an object which will be passed to the next link in the promise chain. I haven't seen any examples online of good ways to handle this, but I know it's a fairly unique scenario.
My code prior to attempting to add support for arrays of signals was simply the following:
exports.getRawDataForExport = function(data) {
  return new Promise(function(resolve, reject) {
    var getTableName = function() {
      return knex('monitored_parameter')
        .where('device_id', data.device_id)
        .andWhere('internal_name', data.param)
        .first()
        .then(function(row) {
          if (row) {
            var resp = {"table": 'monitored_parameter_data_' + row.id, "param": row.display_name};
            return resp;
          }
        });
    }
    var getData = function(runningResult) {
      return knexHistory(runningResult.table)
        .select('data_value as value', 'unit', 'created')
        .then(function(rows) {
          runningResult.data = rows;
          return runningResult;
        });
    }
    var createFile = function(runningResult) {
      var fields = ['value', 'unit', 'created'],
          csvFileName = filePathExport + runningResult.param + '_export.csv',
          zipFileName = filePathExport + runningResult.param + '_export.gz';
      var csv = json2csv({data: runningResult.data, fields: fields, doubleQuotes: ''});
      fs.writeFileSync(csvFileName, csv);
      // create streams for gZipping
      var input = fs.createReadStream(csvFileName);
      var output = fs.createWriteStream(zipFileName);
      // gZip
      input.pipe(gzip).pipe(output);
      return zipFileName;
    }
    getTableName()
      .then(getData)
      .then(createFile)
      .then(function(zipFile) {
        resolve(zipFile);
      });
  });
}
Obviously that works fine for a single table, and I have gotten the getTableName() and createFile() methods updated to handle arrays of data, so this question only pertains to the getData() method.
Cheers!
This kind of problem is far from unique and, approached the right way, is very simply solved.
Don't rewrite any of the three internal functions.
Just purge the explicit promise construction antipattern from .getRawDataForExport() such that it returns a naturally occurring promise and propagates asynchronous errors to the caller.
return getTableName()
  .then(getData)
  .then(createFile);
Now, .getRawDataForExport() is the basic building-block for your multiple "gets".
Then, a design choice: parallel versus sequential operations. Both are very well documented.
Parallel:
exports.getMultiple = function(arrayOfSignals) {
  return Promise.all(arrayOfSignals.map(getRawDataForExport));
};
Sequential:
exports.getMultiple = function(arrayOfSignals) {
  return arrayOfSignals.reduce(function(promise, signal) {
    return promise.then(function() {
      return getRawDataForExport(signal);
    });
  }, Promise.resolve());
};
In the first instance, for best potential performance, try parallel.
If the server chokes, or is likely ever to choke, on parallel operations, choose sequential.
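A usage sketch, assuming the module is required as exporter (a hypothetical path) and signals is the array the user passed up, each element having the { device_id, param } shape that getRawDataForExport() already expects:
var exporter = require('./exporter'); // hypothetical module path

exporter.getMultiple(signals)
  .then(function(zipFiles) {
    // the parallel version resolves with one zip file name per signal
    console.log('created:', zipFiles);
  })
  .catch(function(err) {
    // with the explicit promise construction gone, errors propagate here
    console.error('export failed:', err);
  });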

How can I stream a JSON Array from NodeJS to postgres

I am trying to insert a couple of million records (with approximately 6 fields/columns each), receiving requests from clients with 10,000 records per bulk insert attempt (using sequelize.js and bulkCreate()).
This obviously was a bad idea, so I tried looking into node-pg-copy-streams.
However, I do not want to initiate a change on the client side, where a JSON array is sent like this:
# python
data = [
    {
        "column a": "a values",
        "column b": "b values",
    },
    ...
    # 10,000 items
    ...
]
request.post(data=json.dumps(data), url=url)
On the server side in Node.js, how would I stream the received request.body in the following skeleton?
.post(function(req, res) {
  // old sequelize code
  /* table5.bulkCreate(
    req.body, {raw: true}
  ).then(function() {
    return table5.findAll();
  }).then(function(result) {
    res.json(result.count);
  }); */
  // new pg-copy-streams code
  pg.connect(function(err, client, done) {
    var stream = client.query(copyFrom('COPY my_table FROM STDIN'));
    // My question is here, how would I stream or pipe the request body?
    // ?.on('error', done);
    // ?.pipe(stream).on('finish', done).on('error', done);
  });
});
Here's how I solved my problem.
First, a function to convert my req.body dict to a TSV (not a part of the initial problem):
/**
 * Converts a dictionary and set of keys to a Tab Separated Value blob of text
 * @param {Dictionary object} dict
 * @param {Array of Keys} keys
 * @return {Concatenated Tab Separated Values} String
 */
function convertDictsToTSV(dicts, keys) {
  // ...
}
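The body is elided above; purely as an illustration (not the author's actual code), a conversion along these lines would satisfy that contract:
function convertDictsToTSV(dicts, keys) {
  // one tab-separated row per dict, in the given key order,
  // newline-terminated so COPY ... FROM STDIN can parse it
  return dicts.map(function(dict) {
    return keys.map(function(key) {
      return dict[key];
    }).join('\t');
  }).join('\n') + '\n';
}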
Second, the rest of my original .post function:
.post(function(req, res) {
  // ...
  /* requires 'stream' as
   * var stream = require('stream');
   * var copyFrom = require('pg-copy-streams').from;
   */
  var read_stream_string = new stream.Readable();
  read_stream_string._read = function noop() {};
  var keys = [...]; // set of dictionary keys to extract from req.body
  read_stream_string.push(convertDictsToTSV(req.body, keys));
  read_stream_string.push(null);
  pg.connect(connectionString, function(err, client, done) {
    // ...
    // error handling
    // ...
    var copy_string = 'COPY tablename (' + keys.join(',') + ') FROM STDIN';
    var pg_copy_stream = client.query(copyFrom(copy_string));
    read_stream_string.pipe(pg_copy_stream).on('finish', function(finished) {
      // handle finished and done appropriately
    }).on('error', function(errored) {
      // handle errored and done appropriately
    });
  });
  pg.end();
});
Technically, there is no streaming here, not in terms of how NodeJS streaming works.
You are sending a chunk of 10,000 records each time and expect your server-side to insert those and return an OK to the client to send another 10,000 records. That's throttling/paging data in, not streaming.
Once your server has received the next 10,000 records, insert them (usually as a transaction), and then respond with OK back to the client so it can send the next 10,000 records.
Writing transactions with node-postgres isn't an easy task, as it is too low-level for that.
Below is an example of how to do that with the help of pg-promise:
function insertRecords(records) {
  return db.tx(t => {
    var inserts = [];
    records.forEach(r => {
      var query = t.none("INSERT INTO table(fieldA, ...) VALUES(${propA}, ...)", r);
      inserts.push(query);
    });
    return t.batch(inserts);
  });
}
Then inside your HTTP handler, you would write:
function myPostHandler(req, res) {
  // var records = get records from the request;
  insertRecords(records)
    .then(data => {
      // set response as success;
    })
    .catch(error => {
      // set response as error;
    });
}
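Wiring that handler into an Express app might look roughly like this (a sketch; the route path and response shape are assumptions, and it presumes body-parser or similar has already parsed the JSON array into req.body):
app.post('/records', function(req, res) {
  insertRecords(req.body)
    .then(function() {
      res.json({ok: true}); // client can now send the next 10,000 records
    })
    .catch(function(err) {
      res.status(500).json({ok: false, error: err.message});
    });
});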

Simple Express program for querying a result

I have a snippet of Express code
Below, what I am trying to do is pass the table name to keyName by extracting it from the request.
But I am facing a deadlock.
I wanted to know whether I am following the proper protocol for a JSON response.
[Part-of-Express-Code]
app.get('/RestaurantDesc/:Key', function(request, response, next) {
  var keyName = request.query.Key;
  var name_of_restaurants, RestaurantTimings;
  async.series([
    // Get the first table contents
    function(callback) {
      connection.query('SELECT * FROM ', keyName, function(err, rows, fields) {
        console.log('Connection result error ' + err);
        name_of_restaurants = rows;
        callback();
      });
    },
    // Get the second table contents
    function(callback) {
      connection.query('SELECT * FROM RestaurantTimings', function(err, rows, fields) {
        console.log('Connection result error ' + err);
        RestaurantTimings = rows;
        callback();
      });
    }
    // Send the response
  ], function(error, results) {
    response.json({
      'restaurants': name_of_restaurants,
      'RestaurantTimings': RestaurantTimings
    });
  });
});
I am getting the output as Cannot GET /RestaurantDesc/.
Any ideas?
Your route is a path that you can access through a GET request.
For example, you should be able to access it through:
http://example.com/RestaurantDesc/anyKeyHere
And in your code you have:
var keyName = request.query.Key
req.query contains query-string variables; see http://expressjs.com/api.html#req.query
So your keyName variable won't contain anyKeyHere.
req.params.Key will contain the value anyKeyHere,
but you will need to pass it in the URL path.
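For example, a minimal sketch using the route parameter from the question's own path (assuming the rest of the handler stays the same):
app.get('/RestaurantDesc/:Key', function(request, response, next) {
  var keyName = request.params.Key; // "anyKeyHere" for /RestaurantDesc/anyKeyHere
  // ... run the queries using keyName ...
});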
If you need to pass the key as a query-string parameter instead, you can do this:
app.get('/RestaurantDesc', function(request, response, next) {
  var keyName = request.query.Key;
});
and pass the key like this in your URL:
http://example.com/RestaurantDesc/?Key=restaurnetkeyHere
Try going through the guide on the Express site to understand routing and how it works.
If you're getting "Cannot GET /RestaurantDesc/", it is because you have not set up this route; try /RestaurantDesc/something. request.query is used for search terms, i.e. things that come after a question mark in a URL. Use request.params.Key instead.
Also, as a best practice, you should lowercase resource names and use the shorter req/res instead of request/response.

Returning JSON via node.js response.write has unexpected results

Here is a snippet:
objects = [];
client.keys('objects*', function(err, keys) {
  for (i = 0; i < keys.length; i++) {
    client.hgetall(keys[i], function(err, obj) {
      objects.push(obj);
      if (i === keys.length) {
        response.writeHead(200, {'Content-Type': 'application/json'});
        console.log(JSON.stringify(objects));
        response.write(JSON.stringify(objects));
        response.end();
      }
    });
  }
});
The objects var is an array of objects I populate by querying Redis via node-redis, each with 6 properties. In the console, I get exactly what I would expect. In a client, however, I receive an array with only the last object. Wondering if it was the client's deserializer, I captured the response in Fiddler and it is still an array with a single object.
Here is what I get in the console via console.log:
[{"prop1":"11","prop2":"12","prop3":"13","prop4":"14","prop5":"15","prop6":"16"}, {"prop1":"21","prop2":"22","prop3":"23","prop4":"24","prop5":"25","prop6":"26"},{"prop1":"31","prop2":"32","prop3":"33","prop4":"34","prop5":"35","prop6":"36"},{"prop1":"41","prop2":"42","prop3":"43","prop4":"44","prop5":"45","prop6":"46"},{"prop1":"51","prop2":"52","prop3":"53","prop4":"54","prop5":"55","prop6":"56"},{"prop1":"61","prop2":"62","prop3":"63","prop4":"64","prop5":"65","prop6":"66"}]
I must be doing something wrong...
Your async code is wrong. The check if (i === keys.length) will always be true inside the callbacks, because you are checking whether client.hgetall has been called keys.length times, not whether the calls have completed. You should instead be checking (objects.length === keys.length). – DeaDEnD
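A minimal sketch of that fix, assuming client is a connected node-redis client and response is the HTTP response object from the surrounding handler:
var objects = [];
client.keys('objects*', function(err, keys) {
  keys.forEach(function(key) {
    client.hgetall(key, function(err, obj) {
      objects.push(obj);
      // respond only once every hgetall callback has completed
      if (objects.length === keys.length) {
        response.writeHead(200, {'Content-Type': 'application/json'});
        response.end(JSON.stringify(objects));
      }
    });
  });
});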