Tracking DB querying time - Bookshelf/knex - mysql

I would like to monitor the time taken by a query on my API's db. I so created the following function, using bookshelf-signals, a Bookshelf plugin. :
// Record a start timestamp (ms since epoch) whenever any model begins a fetch.
// NOTE(review): a single shared slot (server.app.fetching) is overwritten by
// every concurrent query, so overlapping fetches clobber each other's start
// time — this is the source of the wrong timer values described below.
bookshelf.on('fetching', () => {
server.app.fetching = new Date().valueOf();
});
// 'counting' reuses the same shared slot, compounding the overlap problem.
bookshelf.on('counting', () => {
server.app.fetching = new Date().valueOf();
});
// On completion, report elapsed time; with overlapping queries this measures
// "now minus the start of the MOST RECENT query", not this query's duration.
bookshelf.on('fetched', () => {
server.statsd.gauge('db_query', new Date().valueOf() - server.app.fetching);
});
... so that I can retrieve the time just before and just after a fetch/count; I did the same with deleting-deleted and saving-saved.
What I think I fail to understand is when fetching and fetched are supposed to be triggered... When I tried to see when fetching and fetched were triggered, it basically ended up like this:
'fetching event A'
'fetching event B'
'fetching event C'
'fetched event C'
'fetched event B'
'fetched event A'
This results in the timers obviously returning wrong values — do you have any lead/clue?
I also saw that one could trigger 'query' events on Knex, and thought of using this as an alternative solution. However, it seems that it only works if I specify the table where I query, ie :
knex('whatever_table').on('query', () => {///});
Making it impracticable in the case where I want to apply an event handler on every model...
I think I should stick with Bookshelf, but how can I do with the way the events are handled?
Thank you in advance!

I just wrote some small test code how to trace transaction duration with knex.
https://runkit.com/embed/679qu91ylu4w
/**
* Calculate transaction durations in knex
*
*/
// Ensure the sqlite3 driver is loadable before knex tries to use it.
require('sqlite3');
// In-memory SQLite instance — nothing persists between runs.
var knex = require("knex")({
client: 'sqlite',
connection: ':memory:',
pool: { min: 1, max: 10 }
});
// True when the emitted query opens a transaction.
const isTransactionStart = (querySpec) => querySpec.sql === 'BEGIN;';
// True when the emitted query terminates a transaction (commit or rollback).
const isTransactionEnd = (querySpec) => ['COMMIT;', 'ROLLBACK;'].includes(querySpec.sql);
// Start-time bookkeeping per knex connection (__knexUid -> ms timestamp).
const transactionDurations = {};
knex.on('query', querySpec => {
console.log('On query', querySpec);
if (isTransactionStart(querySpec)) {
// Guard against two BEGINs on the same connection without an end between.
if (transactionDurations[querySpec.__knexUid]) {
console.error('New transaction started, before earlier was ended');
return;
}
transactionDurations[querySpec.__knexUid] = new Date().getTime();
}
if (isTransactionEnd(querySpec)) {
const startTime = transactionDurations[querySpec.__knexUid];
// FIX: bail out instead of logging "endTime - undefined" (NaN) below.
if (!startTime) {
console.error('Transaction end detected, but start time not found');
return;
}
const endTime = new Date().getTime();
// FIX: delete the entry — assigning null kept one key per connection alive
// forever, the memory leak acknowledged at the end of this answer.
delete transactionDurations[querySpec.__knexUid];
console.log('TRANSACTION DURATION', endTime - startTime);
}
});
// just as an example of other available events to show when they are called
knex.on('query-response', (res, querySpec) => {
// console.log('On query response', res, querySpec);
});
knex.on('query-error', (err, querySpec) => {
// console.log('On query error', err, querySpec);
});
try {
// FIX: the original assigned to an undeclared `a`, creating an implicit
// global; the value was never read, so just await the combined promise.
await Promise.all([
knex.transaction(trx => {
return trx.raw('select 1');
}),
knex.transaction(trx => {
return trx.raw('select 2');
}),
// This transaction fails on purpose so the ROLLBACK path is exercised.
knex.transaction(trx => {
return trx.raw('error me');
})
]);
} catch (e) {
console.log('Got ERROR:', e);
}
The same kind of approach should also work for query timing. To prevent the timer bookkeeping from leaking memory, you should add some cleanup code though.
Query duration timer should be started in query event and stopped in query-response or query-error depending which one triggers first.
To be able to match query - query-response pair querySpec.__knexQueryUid attribute can be used.

Based on Mikael Lepistö's snippet I came up with this:
// Wire knex query events to statsd: one timer entry per in-flight query,
// keyed by knex's unique query id so concurrent queries cannot clobber each
// other (unlike the shared-slot bookshelf approach in the question).
const dbEvents = (server, sdc) => {
knex.on('query', data => {
// Remember when this particular query started (ms since epoch).
server.app[data.__knexQueryUid + ''] = new Date().valueOf();
});
knex.on('query-response', (data, obj, builder) => {
const key = obj.__knexQueryUid + '';
const startedAt = server.app[key];
// FIX: remove the per-query entry once consumed — the original kept every
// key on server.app forever, leaking memory under sustained traffic.
delete server.app[key];
// FIX: skip the metric when no start time was recorded (e.g. response for
// a query issued before dbEvents was installed) instead of reporting NaN.
if (typeof startedAt === 'number') {
sdc.counter('db_queries_time', new Date().valueOf() - startedAt);
}
sdc.increment('nr_db_queries');
});
};
And I then call the function when I start the server - I am working with Hapijs.
EDIT: sdc is a statsd client, I use it to send the DB time :)

Related

Dialogflow ES fulfillment did not reply

[Update] with the entire code
// See https://github.com/dialogflow/dialogflow-fulfillment-nodejs
// for Dialogflow fulfillment library docs, samples, and to report issues
'use strict';
const functions = require('firebase-functions');
const {WebhookClient} = require('dialogflow-fulfillment');
const {Card, Suggestion} = require('dialogflow-fulfillment');
const admin = require('firebase-admin');
process.env.DEBUG = 'dialogflow:debug'; // enables lib debugging statements
// Initialise the Firebase Admin SDK with the credentials supplied by the
// Cloud Functions runtime, then grab a Firestore handle for the handlers below.
admin.initializeApp(functions.config().firebase);
const db = admin.firestore();
// Opt in to Firestore Timestamp objects (instead of JS Dates) in snapshots.
const settings = {/* your settings... */ timestampsInSnapshots: true};
db.settings(settings);
// HTTPS entry point invoked by Dialogflow for every fulfilled intent.
exports.dialogflowFirebaseFulfillment = functions.https.onRequest((request, response) => {
const agent = new WebhookClient({ request, response });
// console.log('Dialogflow Request headers: ' + JSON.stringify(request.headers));
console.log('Dialogflow Request body: ' + JSON.stringify(request.body));
// Records the recognised start date under the messenger sender's document.
// NOTE(review): the promise chain below is not returned, so the library may
// send the response before the Firestore write finishes — confirm any reply
// added inside .then() actually reaches the user.
function writeToDb (agent) {
const startDate = agent.parameters.startdate;
// Always assume user enter time in the past (for existing time)
console.log("start date:" + startDate);
if (Date.parse(startDate) > Date.now()) {
// NOTE(review): Date.parse(startDate) above suggests startDate is a string
// here; a string has no setFullYear, so this branch would throw — TODO
// confirm the runtime type of agent.parameters.startdate.
startDate.setFullYear(startDate.getFullYear() - 1);
// console.log("modify start date to: " + startDate);
}
// One document per messenger sender id in the 'period' collection.
const dfRef = db.collection('period').doc(request.body.originalDetectIntentRequest.payload.data.sender.id);
agent.add('Got it. That me write it done for ya');
dfRef.get().then(user => {
if(!user.exists) {
// First entry for this user: create the document with a one-element array.
dfRef.create({dates: admin.firestore.FieldValue.arrayUnion(startDate)})
.then(() => {
}).catch(err => {
console.log('error create firestore date entry:' + `${err}`);
});
} else {
// Existing user: append the new date (arrayUnion also deduplicates).
dfRef.update({dates: admin.firestore.FieldValue.arrayUnion(startDate)})
.then(() => {
}).catch(err => {
console.log('error update firestore date entry:' + `${err}`);
});
}
}).catch(err => {
console.log('error access firestore date:' + `${err}`);
});
}
// Replies with either the last recorded date or a predicted next date.
function readFromDb (agent) {
// Get the database collection 'dialogflow' and document 'agent'
const startDate = agent.parameters.startdate;
const future = agent.parameters.future;
const dialogflowAgentDoc = db.collection('period').doc(request.body.originalDetectIntentRequest.payload.data.sender.id);
// Get the value of 'entry' in the document and send it to the user
return dialogflowAgentDoc.get()
.then(doc => {
if (doc.exists) {
var darray = doc.data().dates;
if (darray.length > 0) {
if (future) {
agent.add('let me calculate for you..');
// NOTE(review): bases the prediction on the SECOND-to-last date
// (length-2) — confirm this is intended rather than length-1.
var next = new Date(darray[darray.length-2]);
const dayDiff = calculateSchedule(darray);
next.setDate(next.getDate() + dayDiff * 1);
agent.add(next.toLocaleDateString('en-us', {month:"short", day: 'numeric', weekday: 'short'}));
} else {
agent.add('let me look up for you..');
agent.add(new Date(darray[darray.length-1]).toLocaleDateString('en-us', {month:"short", day: 'numeric', weekday: 'short'}));
}
} else {
agent.add('I cant find anything :( ');
}
} else {
agent.add('something was wrong, I cant find your record :/');
}
return Promise.resolve('complete!');
}).catch(err => {
agent.add(`Error reading entry from the Firestore database. ${err}`);
});
}
// Placeholder — body omitted in the original post ("emitted.." presumably
// a typo for "omitted").
function calculateSchedule(arr) {
// emitted..
}
// Run the proper function handler based on the matched Dialogflow intent name
let intentMap = new Map();
intentMap.set('it start today', writeToDb);
intentMap.set('when did it start', readFromDb);
agent.handleRequest(intentMap);
});
[Original]
Hi, my Dialogflow ES agent is connected to FB Messenger, and the purpose is to reply with a message after I record what the customer says into the DB. I checked whether the document exists; here is my code:
// Persists the recognised start date under a fixed document and confirms to
// the user. FIX: the promise chain is now RETURNED (and propagated through
// the nested create/update) so the dialogflow-fulfillment library waits for
// the async work before sending the response — the original fired the writes
// and returned undefined, so agent.add() inside .then() ran after the reply
// had already been sent, which is why no message reached the user.
function writeToDb (agent) {
const startDate = agent.parameters.startdate;
const dfRef = db.collection('start').doc('my_id');
return dfRef.get().then(user => {
if(!user.exists) {
return dfRef.create({dates: admin.firestore.FieldValue.arrayUnion(startDate)})
.then(() => {
agent.add('Got it. let me write it down for ya');
}).catch(err => {
console.log(`${err}`);
});
} else {
return dfRef.update({dates: admin.firestore.FieldValue.arrayUnion(startDate)})
.then(() => {
agent.add('Got it. let me write it down for ya');
}).catch(err => {
console.log(`${err}`);
});
}
});
}
The startDate value is successfully stored in Firestore. However, I never get the reply message — is there anything I did wrong? I feel it should be simple enough.
Thanks for your help.
You are missing the IntentMap() setup — see steps 5 and 6.
In the intentMap, you will need to add a map from the Intent name to a function that will do the handling when that Intent triggers the webhook. You can have a different handler function for each Intent, use the same function for some, have those functions call other functions, whatever you need. Finally use agent.handleRequest(intentMap);.

Cannot pause pool when streaming data with mysql-node

I use node and the mysql package to stream data from node to client.
The idea is,
define a pool, and queries based on the pool.
Then pass the streaming rows to an array.
If that array's length reaches a length, pause the stream, process the rows, send them to client via websockets.
Resume stream. Repeat until no other rows are left.
I am following the examples on the mysql npm page but I get pool.pause is not a function
Here is the code
// Shared connection pool — credentials come from the app's config module.
var pool = mysql.createPool({
connectionLimit : 100,
host : config.host,
user : config.user,
password : config.password,
database : config.database
});
//turn simple queries to promises
const query = (str, ar) => {
return new Promise(function(resolve, reject) {
pool.query(str, ar, function (error, results, fields) {
if (error) {
return reject(error);
}
resolve({results, fields});
});
})//promise
}
const userdetails = (ws, data) => {
//do a check, unrelated to streaming
query('SELECT COUNT(id) as countrows FROM users WHERE category = ? ', [data.category])
.then((data)=>{
// NOTE(review): this `data` parameter shadows the outer `data` argument,
// so the websocket payload sent in processRows below is the COUNT result,
// not the streamed rows.
if(data.results[0].countrows > 5000){
// if more than 5000, we stream
// the following is based on the mysql code found in their page
// it has no relation to the promise-based query above
var query = pool.query('SELECT id, name, address, sale, preexisting, amount FROM users WHERE category = ? ', [data.category])
query.on('result', row => {
// NOTE(review): rowsToProcess is never declared — implicit global.
rowsToProcess.push(row);
if (rowsToProcess.length >= 100) {
// BUG (the question being asked): mysql's Pool has no pause()/resume();
// those methods live on the Connection (or a query stream), hence
// "TypeError: pool.pause is not a function".
pool.pause();
processRows();
}
});
query.on('end', () => {
processRows();
});
const processRows = (done) => {
//process some data
//send them back using websockets
ws.send(JSON.stringify({ data }));
pool.resume();
}
}
})
}
I don't know if this is related to making a simple query, a promise, using the pool, or anything else. This gives the TypeError: pool.pause is not a function and I cannot fix it. Please advise.
Thanks
You can try this solution,
I have used this many times:
// Runs a query in streaming mode and resolves with every received row once
// the stream ends; rejects on the first stream error.
const mysqlStreamQueryPromise = (queryString, params) =>
new Promise((resolve, reject) => {
const rowStream = connection.query(queryString, params).stream();
const collected = [];
rowStream.on('data', (row) => {
// Briefly pause while buffering the row, mirroring the original's
// backpressure pattern.
rowStream.pause();
collected.push(row);
rowStream.resume();
});
rowStream.on('end', () => resolve(collected));
rowStream.on('error', (err) => reject(err));
});
Use this
// Shared connection pool — credentials come from the app's config module.
var pool = mysql.createPool({
connectionLimit : 100,
host : config.host,
user : config.user,
password : config.password,
database : config.database
});
// Promise wrapper around pool.query: resolves with {results, fields},
// rejects with the driver error.
const query = (sqlText, values) =>
new Promise((resolve, reject) => {
pool.query(sqlText, values, (error, results, fields) => {
if (error) {
reject(error);
} else {
resolve({ results, fields });
}
});
});
// Streams large result sets to the client over websockets in batches of 100.
// FIXES relative to the original answer:
//  - `rowsToProcess` is declared (it was an implicit global);
//  - a query stream (from .stream()) emits 'data'/'end', not 'result';
//  - pause()/resume() are called on the STREAM — the mysql Pool has neither,
//    which is exactly the asker's "pool.pause is not a function" TypeError;
//  - each batch sends the buffered rows, not the shadowed count result.
const userdetails = (ws, data) => {
//do a check, unrelated to streaming
query('SELECT COUNT(id) as countrows FROM users WHERE category = ? ', [data.category])
.then((countResult) => {
if (countResult.results[0].countrows > 5000) {
// if more than 5000, we stream
const rowsToProcess = [];
const rowStream = pool.query('SELECT id, name, address, sale, preexisting, amount FROM users WHERE category = ? ', [data.category]).stream();
const processRows = () => {
// Pause while the batch is processed and flushed to the websocket.
rowStream.pause();
ws.send(JSON.stringify({ data: rowsToProcess.splice(0, rowsToProcess.length) }));
rowStream.resume();
};
rowStream.on('data', (row) => {
rowsToProcess.push(row);
if (rowsToProcess.length >= 100) {
processRows();
}
});
rowStream.on('end', () => {
processRows();
});
rowStream.on('error', (err) => {
// Surface stream failures instead of hanging silently.
console.error(err);
});
}
})
}

node js script exits prematurely

Promise newbie here.
I'm trying to retrieve icon_name field from asset database, Equipment table in mongodb
and update icon_id field in equipments database, equipments table in mysql.
I have about 12,000 records with icon_name field in Equipment.
The script runs successfully however it doesn't seem to go through all the records.
When I check the equipments table there are only about 3,000 records updated.
I tried running the script several times and it appears to update a few more records each time.
My suspicion is that the database connection is closed before all the queries are finished, but since I use Promise.all I don't know why that happens.
Here is the script
const _ = require('lodash'),
debug = require('debug')('update'),
Promise = require('bluebird')
const asset = require('../models/asset'),
equipments = require('../models/equipments')
const Equipment = asset.getEquipment(),
my_equipments = equipments.get_equipments(),
icons = equipments.get_icons()
// Copy icon ids from the Mongo `Equipment` docs onto the MySQL `equipments`
// rows, then close both connections. Note: this still fires every UPDATE in
// parallel; see the answer below about batching ~12k concurrent updates.
Promise.resolve()
.then(() => {
debug('Retrieve asset equipments, icons')
return Promise.all([
icons.findAll(),
Equipment.find({ icon_name: { $ne: null } })
])
})
.then(([my_icons, asset_equipments]) => {
debug('Update equipments')
const updates = []
console.log(asset_equipments.length)
asset_equipments.forEach((aeq, i) => {
// NOTE(review): .find() returns undefined when no icon matches, making
// `.id` throw and reject the whole chain — confirm every icon_name exists.
const icon_id = my_icons.find(icon => icon.name === aeq.icon_name).id
// FIX: `up` was assigned without a declaration, creating an implicit
// global (a crash under 'use strict' and a race magnet otherwise).
const up = my_equipments.update(
{ icon_id },
{ where: { code: aeq.eq_id } }
)
updates.push(up)
})
return Promise.all(updates)
})
.then(() => {
debug('Success: all done')
asset.close()
equipments.close()
})
.catch(err => {
debug('Error:', err)
asset.close()
equipments.close()
})
Thanks in advance.
Code looks fine but spawning 12000 promises in parallel might cause some trouble on the database connection level. I would suggest to batch the concurrent requests and limit them to let's say 100. You could use batch-promises (https://www.npmjs.com/package/batch-promises)
Basically something like
return batchPromises(100, asset_equipments, aeq => {
const icon_id = my_icons.find(icon => icon.name === aeq.icon_name).id;
return my_equipments.update({ icon_id }, { where: { code: aeq.eq_id } });
});

How to know when the feathers-client is connected to a service

I'm trying to test an event filter however there's a timing issue that I'm not sure how to resolve. Other than wrapping the REST request in a setTimeout, how could I get this working?
const app = require('../../src/app');
const feathers = require('feathers/client')
const socketio = require('feathers-socketio/client');
const hooks = require('feathers-hooks');
const io = require('socket.io-client');
const rp = require('request-promise');
const service = app.service('users');
let server = null;
describe('\'users\' service', () => {
// Start the HTTP/socket server fresh for every test and tear it down after.
beforeEach((done) => {
server = app.listen('3030');
server.once('listening', done);
});
afterEach((done) => {
server.close(done);
});
it('returns stuff #test', (done) => {
const socket = io('http://localhost:3030');
// Separate feathers *client* app talking to the server over socket.io.
const app = feathers()
.configure(hooks())
.configure(socketio(socket));
const messageService = app.service('users');
// The test passes once the 'created' event reaches the socket.io client.
messageService.on('created', message => {
console.log('Created a message', message);
done();
});
// NOTE(review): this fires when the raw socket connects, which is BEFORE
// the feathers service has registered its listeners — the timing gap this
// question is about.
socket.on('connection', () => {
//
// The messageService is not connected yet
// so messages.filters.js will not fire
//
// Giving it a chance to connect with setTimeout does work...
// setTimeout(() => {
rp({
method: 'POST',
url: 'http://localhost:3030/users',
body: {
test: 'Message from REST'
},
json: true
});
// }, 500);
});
});
});
I have tried replacing the socket.on with these as well:
messageService.on('connection'
service.on('connection' (based on Node.js EventEmitter)
and so on...
Edit
I have since found service.on('newListener' works however it is being triggered many times. I need to track down the single connection:
const messageService = app.service('users');
// Resolve the test once the 'created' event arrives over the socket.
messageService.on('created', message => {
console.log('Created a message', message);
done();
});
It's simply service.on('newListener'.
https://nodejs.org/api/events.html#events_event_newlistener
Listeners registered for the 'newListener' event will be passed the event name and a reference to the listener being added.
However, when I implemented this I found that it is listening to 5 different events. So, you need to filter those down:
// 'newListener' fires on the service every time a handler is registered;
// filtering on event === 'created' narrows it to the one registration we
// care about before issuing the REST request.
service.on('newListener', (event, listener) => {
if (event === 'created') {
rp({
method: 'POST',
url: 'http://localhost:3030/users',
body: {
test: 'Message from REST'
},
json: true
});
}
});

How to return a nested json in node.js mysql in a single query

I'm trying to create an api that will return a nested json, coming from two related tables student and studentSubjects
[{
id:"1",
name: "John",
subjects: [{
id:"1",
subject: "Math"
},
{
id:"2",
subject: "English"
}
]
},
{
id:"2",
name: "Peter",
subjects: [{
id:"1",
subject: "Math"
},
{
id:"2",
subject: "English"
}
]
}]
My code looks like this:
// Fetch all students and send them as a flat JSON payload.
// NOTE(review): the acquire error (err) is never checked before using `con`,
// and con.release() runs synchronously BEFORE the query callback fires —
// confirm the pool implementation tolerates releasing with a query in flight.
this.get = function(res){
db.acquire(function(err, con){
con.query('SELECT * FROM students', function(err, results){
if (err){
res.send({status: 0, message: 'Database error'});
}else{
res.send({status: 1, data: results});
}
})
con.release()
})
}
I know the query should have joins, but then it only returns a single row. I also tried to make a loop, but it won't work because it's async.
Thanks for your help!!
You cannot create a nested JSON from a MySQL query because it will always return a flat result.
Anyway, to create a nested JSON you should create multiple queries and insert the corresponding array object where needed.
You should really consider using Promises for creating nested queries because it will allow you to make asynchronous operations back to back.
Below code will also close the connection if an error occurs in any of the queries.
PS: I explained each step in the comments in the code below
Imagine having a database called 'School' and three tables called 'Student', 'Subject' and 'Link_student_subject'.
// Instantiate mysql database client and connection settings
const mysql = require( 'mysql' );
const config = {
host : 'localhost',
user : 'root',
password : 'root',
database : 'school'
}
// NOTE(review): `connection` appears unused below — the Database class
// creates its own connection per instance.
var connection;
// Instantiate express routing variables
const express = require('express');
const router = express.Router();
module.exports = router;
/**
 * Thin promise-based wrapper around a mysql connection.
 * - The constructor opens the underlying connection via createConnection.
 * - query() resolves with the result rows or rejects with the driver error.
 * - close() resolves once the connection has ended cleanly.
 */
class Database {
constructor( config ) {
this.connection = mysql.createConnection( config );
}
query( sql, args ) {
const run = ( resolve, reject ) => {
this.connection.query( sql, args, ( err, rows ) => {
err ? reject( err ) : resolve( rows );
} );
};
return new Promise( run );
}
close() {
const shutDown = ( resolve, reject ) => {
this.connection.end( err => {
err ? reject( err ) : resolve();
} );
};
return new Promise( shutDown );
}
}
// Open a connection, run `callback(database)`, and guarantee the connection
// is closed whether the callback's promise fulfils or rejects; the original
// result (or error) is propagated to the caller after the close completes.
Database.execute = function( config, callback ) {
const database = new Database( config );
const closeThenReturn = result => database.close().then( () => result );
const closeThenRethrow = err => database.close().then( () => { throw err; } );
return callback( database ).then( closeThenReturn, closeThenRethrow );
};
// Instantiate Database
// NOTE(review): this instance is never used below — the route goes through
// Database.execute, which creates its own connection; consider removing.
var database = new Database(config);
// Express routing
router.get('/students', function (req, res) {
// Variables - Rows from Students & Subjects & Link_student_subject
let rows_Students, rows_Subjects, rows_Link_Student_Subject;
// Create a Promise chain by
// executing two or more asynchronous operations back to back,
// where each subsequent operation starts when the previous operation succeeds,
// with the result from the previous step
Database.execute( config,
// 'null as subjects' reserves a column so each student row already carries
// a `subjects` property to overwrite with the nested array below.
database => database.query( "select a.*, null as subjects from student a" )
.then( rows => {
rows_Students = rows;
return database.query( "select * from subject" )
} )
.then( rows => {
rows_Subjects = rows;
return database.query( "select * from link_student_subject" )
} )
.then( rows => {
rows_Link_Student_Subject = rows;
} )
).then( () => {
// Create your nested student JSON by looping on Students
// and inserting the corresponding Subjects array
// NOTE(review): nested loop + linear search is O(students × links); fine
// for small tables, consider a Map keyed by id_student for large ones.
for (let i = 0; i < rows_Students.length; i++) {
let arraySubjects = [];
for (let x = 0; x < rows_Link_Student_Subject.length; x++) {
if(rows_Students[i].id == rows_Link_Student_Subject[x].id_student){
arraySubjects.push(searchObjInArray(rows_Subjects, "id", rows_Link_Student_Subject[x].id_subject));
}
}
rows_Students[i].subjects = arraySubjects;
}
res.send(JSON.stringify(rows_Students));
} ).catch( err => {
// handle the error
res.send(err);
});
});
// Return the first object in `array` whose `arrayProp` property loosely
// equals `searchVal` (== on purpose: DB ids may arrive as strings or
// numbers), or null when there is no match.
function searchObjInArray(array, arrayProp, searchVal){
// FIX: the original routed the match out through a side-effect variable and
// left `obj` unused; Array.prototype.find already returns the element.
const match = array.find(item => item[arrayProp] == searchVal);
return match === undefined ? null : match;
}
If you run this code with node and go to "127.0.0.1/students" it will return exactly the same JSON as in your question.
All credits and extra info on MySQL and promises - https://codeburst.io/node-js-mysql-and-promises-4c3be599909b
MySQL 5.7+ has a JSON datatype that you can leverage for your "subjects" field. Here's a great tutorial on how to use that:
https://www.sitepoint.com/use-json-data-fields-mysql-databases/