Create database connections on startup - sails.js - mysql

I know you can use Waterline and the models approach, but the issue is that I have to access more than one database to generate a response. Besides, the models approach makes it difficult to reason about a solution, because the data is joined and inferred from different tables across the different databases.
Therefore, I would like to know how I can open a DB connection using the mysql or mysql2 native driver and reuse it throughout the app. Where is the most suitable place, a hook, etc.? And how can I close the connections when the app goes down?

A custom hook is the best fit for this. For Redis, I created one like this:
api/hooks/redis/index.js

var redisModule = require('redis');

module.exports = function connectToRedis(sails) {
  return {
    connectToRedis: function (callback) {
      var hook = this;
      var config = sails.config.connections.redis;
      var redisClient = redisModule.createClient(config.port, config.host);

      hook.initAdapters();

      redisClient.on('connect', function () {
        sails.log.verbose('Connection to redis was successful!');
        // select db
        redisClient.select(config.db, function (err) {
          if (err) {
            return callback(err);
          }
          sails.adapters.redis = redisClient;
          callback();
        });
      });

      redisClient.on('error', function (error) {
        sails.log.error(error);
        callback();
      });
    },

    initAdapters: function () {
      if (sails.adapters === undefined) {
        sails.adapters = {};
      }
    },

    // Run automatically when the hook initializes
    initialize: function (cb) {
      var hook = this;
      hook.connectToRedis(function () {
        cb();
      });
    },
  };
};
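For the MySQL case in the question, a similar hook could create shared connection pools with the mysql2 driver and close them when the app lowers. The following is only a minimal sketch under assumptions: the hook name, the sails.dbPools property and the connection config keys (reportingDb, billingDb) are made up for illustration, not part of the original answer.

// api/hooks/mysql-pools/index.js (hypothetical)
var mysql = require('mysql2');

module.exports = function mysqlPoolsHook(sails) {
  return {
    // Runs automatically when the hook initializes.
    initialize: function (cb) {
      // One pool per database the app needs to query (assumed config keys).
      sails.dbPools = {
        reporting: mysql.createPool(sails.config.connections.reportingDb),
        billing: mysql.createPool(sails.config.connections.billingDb)
      };

      // Close the pools when the app goes down.
      sails.on('lower', function () {
        sails.dbPools.reporting.end(function () {});
        sails.dbPools.billing.end(function () {});
      });

      return cb();
    }
  };
};

Anywhere else in the app you can then run, for example, sails.dbPools.reporting.query('SELECT 1', function (err, rows) { ... }) against whichever database you need, and the pools take care of reusing connections.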

Related

MySQL querying in NodeJS

I'm trying to write an app that will check whether or not a webpage has changed.
I'm using Node.js and the mysql package because I'm familiar with them.
At the moment my problem is that my query is too 'slow', so my function does not return true even though it should. Can I somehow force my app to wait for the query?
Edit:
checkEntry(webpage, callback) {
  var oldPage;
  this.mysql.query('SELECT OldWebpagecol FROM Web_Scraping.OldWebpage WHERE idOldWebpage = 15', (err, rows) => {
    if (err) console.log(err);
    oldPage = rows[0].OldWebpagecol;
    if (webpage === oldPage) {
      return true;
    }
    return false;
  })
}
You can use async/await to wait for the result of an asynchronous call.
For example,

(async () => {
  let response = await fetch('/api/users');
})();
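Applied to the mysql code above, that means turning the callback-based query into a promise and awaiting it. Here is a minimal sketch (assuming connection is the mysql connection object that the original code accesses as this.mysql):

const { promisify } = require('util');

async function checkEntry(connection, webpage) {
  // promisify resolves with the rows that the query callback would receive.
  const query = promisify(connection.query).bind(connection);
  const rows = await query(
    'SELECT OldWebpagecol FROM Web_Scraping.OldWebpage WHERE idOldWebpage = 15'
  );
  return webpage === rows[0].OldWebpagecol;
}

The caller then has to await checkEntry(...) as well (or use .then()), since the comparison result is only available once the query has finished.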

Sequelize caching with node-cache-manager

I'm trying to introduce the simplest possible caching layer in my webapp: all I want to do is cache the results of a few queries for 24 hours, since that is how often the DB receives new data. I'm trying to use node-cache-manager (which looks great) but I'm struggling! I don't think I completely understand how it should be implemented with Sequelize. The example appears to use Mongoose and MongoDB:
function responder(res) {
  return function respond(err, data) {
    var startTime = moment(res.req._startTime);
    var diff = moment().diff(startTime, 'ms');
    if (err) {
      err.status = 500;
      res.render('error', {error: err});
    } else {
      data.requestTime = diff;
      res.render('users/show', data);
    }
  };
}

function fetchUser(id, cb) {
  var cacheKey = 'user_' + id;
  memoryCache.wrap(cacheKey, function (cacheCb) {
    console.log("Fetching user from slow database");
    User.get(id, cacheCb);
  }, cb);
}

router.get('/:id', function (req, res) {
  fetchUser(req.param('id'), responder(res));
});
I'm using MySQL and currently have the example below, but I can see the query hitting the database every time; it does not seem to be persisting to or creating the cache:
function responder(res) {
  return function respond(err, data) {
    var startTime = moment(res.req._startTime);
    var diff = moment().diff(startTime, 'ms');
    if (err) {
      err.status = 500;
      res.json({valuesCount: err});
    } else {
      data.requestTime = diff;
      res.json({valuesCount: res});
    }
  };
}

function fetchCount(req, cb) {
  var cacheKey = 'allDatabaseRecords',
      table = sequelize.import('extracts');
  memoryCache.wrap(cacheKey, function (cacheCb) {
    console.log("Fetching count from slow database");
    table.count().then(cacheCb);
  }, cb);
}

router.post('/extract-tool/data-filter', function(req, res) {
  var table = sequelize.import('extracts');
  fetchCount(req, responder(res));
});
I've tried following the documentation and the example but, as mentioned, this still hits the DB every time! Any help is massively appreciated!
I could not get this to work as per the examples given; I had to explicitly set and get the key/value pair to and from the cache each time.
Now it works fine, structured as below:
memoryCache.get(cacheKey, function (err, result) {
  if (result !== undefined) {
    // Cache hit: respond straight from the cache.
    return res.json({valuesCount: result});
  }
  // Cache miss: query the database, then store the result for next time.
  console.log("Fetching count from slow database");
  table.count().then(function (data) {
    memoryCache.set(cacheKey, data, {ttl: ttl}, function (err) {
      res.json({valuesCount: data});
    });
  });
});
I still don't understand how the wrap function works and what it's for, since I'm explicitly setting and getting the cache values now; would appreciate some comments still :).
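For what it's worth, wrap is meant to bundle exactly that get-or-fetch-then-set pattern into one call: it looks the key up and, only on a miss, invokes your fetch function and caches whatever that function passes to its node-style callback. The reason the earlier attempt never cached anything is most likely that table.count() returns a promise, while wrap hands you an (err, value) callback, so the value never reached the cache. Below is a minimal sketch of bridging the two, reusing memoryCache, table and ttl from the code above (the fetchCount signature is simplified for illustration):

function fetchCount(cb) {
  memoryCache.wrap('allDatabaseRecords', function (cacheCb) {
    // Only runs on a cache miss; convert the promise into the
    // (err, value) callback that node-cache-manager expects.
    console.log("Fetching count from slow database");
    table.count().then(
      function (count) { cacheCb(null, count); },
      function (err) { cacheCb(err); }
    );
  }, {ttl: ttl}, cb);
}

// Usage inside the route handler: calls within the TTL are served from the cache.
fetchCount(function (err, count) {
  if (err) { return res.status(500).json({error: err.message}); }
  res.json({valuesCount: count});
});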

sails.js run multiple command query in mysql

I have a problem executing multiple SQL queries in sails.js.
I would like to run a script from a file on sails lift, so I wrote custom handling inside /config/bootstrap.js:
module.exports.bootstrap = function(cb) {
  fs.readFile('SQL\\StoredProcedures\\MyProcedure.sql', 'utf8', function (err, data) {
    if (err) {
      console.log(err);
    }
    console.log(data);
    MyModel.query(data, function(err, records) {
      if (err) {
        console.log(err);
      }
    });
  });

  // It's very important to trigger this callback method when you are finished
  // with the bootstrap! (otherwise your server will never lift, since it's waiting on the bootstrap)
  cb();
};
The problem is that the .query() function does not accept multiple queries inside. I mean, it does accept it when my file contains:
DROP PROCEDURE IF EXISTS `MyProcedure`;
But it won't accept it when my file contains:
DROP PROCEDURE IF EXISTS `MyProcedure`;
SELECT * FROM something;
Is there a way to execute this file?
This can be done by setting up your config/datastores.js like this:
module.exports.datastores = {
  default: {
    multipleStatements: true
  }
};
By adding this to your configuration, you're telling the underlying MySQL driver to accept multiple statements in a single query.
The issue is that, by default, the Node MySQL driver doesn't allow running multiple statements at once; this is to guard against SQL injection.
For more complete explanation, see #sgress454's comment here: https://github.com/balderdashy/sails/issues/4192
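For context, a fuller config/datastores.js for Sails 1.x might look like the sketch below; the adapter and the connection URL are placeholder assumptions, and the multipleStatements flag is the only part the answer above actually requires.

// config/datastores.js -- minimal sketch; the credentials and database
// name are placeholders, adjust them to your own setup.
module.exports.datastores = {
  default: {
    adapter: 'sails-mysql',
    url: 'mysql://root:password@localhost:3306/mydb',
    multipleStatements: true
  }
};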
Alternatively, you could split the lines in your file and run the queries one by one:
var fs = require('fs');

module.exports = function (cb) {
  fs.readFile('SQL\\StoredProcedures\\MyProcedure.sql', 'utf8', function (err, data) {
    if (err) {
      sails.log.error(err);
      return cb(); // you have no queries to run
    }
    sails.log.info(data);
    var queries = data.split('\n');

    // async is injected into the global scope by sails, so no need to require it.
    // If you don't need your queries to run in order, then you can use async.each
    // instead of async.eachSeries.
    // https://github.com/caolan/async#each
    async.eachSeries(
      queries,
      function (query, cb) {
        MyModel.query(query, function (err, records) {
          if (err) {
            sails.log.error(err);
            // If you don't want other queries to execute in case of an error,
            // return cb(err) instead.
            return cb();
          }
          return cb(); // move on to the next query
        });
      },
      function (err) {
        if (err) { sails.log.error(err); }
        return cb();
      }
    );
  });
};

How to promisify a MySql function using bluebird?

Some time ago I decided to switch from PHP to Node. In my first projects I didn't want to use any ORM, since I thought I didn't need to complicate my life learning yet another thing (at the time I was learning Node and Angular), so I decided to use the mysql package without anything else. It is important to say that I have some complex queries and I didn't want to learn from scratch how to make them work using one of the 9000 ORMs Node has. This is what I've been doing so far:
thing.service.js
Thing.list = function (done) {
  db.query("SELECT * FROM thing...", function (err, data) {
    if (err) {
      done(err);
    } else {
      done(null, data);
    }
  });
};
module.exports = Thing;
thing.controller.js
Thing = require('thing.service.js');

Thing.list(function (err, data) {
  if (err) {
    res.status(500).send('Error D:');
  } else {
    res.json(data);
  }
});
How can I promisify this kind of function using bluebird? I've already tried, but.... here I am asking for help. This is what I tried:
var Thing = Promise.promisifyAll(require('./models/thing.service.js'));
Thing.list().then(function(){})
I have done it this way and it is working fine:
const connection = mysql.createConnection({.....});
global.db = Bluebird.promisifyAll(connection);

db.queryAsync("SELECT * FROM users").then(function (rows) {
  console.log(rows);
});
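As a side note on the attempt in the question: Promise.promisifyAll does not change the existing methods, it adds new ones with an Async suffix, so after promisifying the service the call would presumably need to be Thing.listAsync() rather than Thing.list(). A small sketch based on the asker's own modules:

var Promise = require('bluebird');
// promisifyAll leaves Thing.list untouched and adds Thing.listAsync.
var Thing = Promise.promisifyAll(require('./models/thing.service.js'));

Thing.listAsync()
  .then(function (data) {
    res.json(data);
  })
  .catch(function (err) {
    res.status(500).send('Error D:');
  });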
I have never had much luck with promisifyAll and IMO I prefer to handle my internal checks manually. Here is an example of how I would approach this:
//ThingModule
var Promises = require('bluebird');

Things.list = function(params) {
  return new Promises(function(resolve, reject) {
    db.query('SELECT * FROM thing...', function(err, data) {
      return (err ? reject(err) : resolve(data));
    });
  });
}

//usage
var thinger = require('ThingModule');

thinger.list().then(function(data) {
  //do something with data
})
.error(function(err) {
  console.error(err);
})
You can also create a function that fires SQL like this:
function sqlGun(query, obj, callback) {
  mySQLconnection.query(query, obj, function(err, rows, fields) {
    if (err) {
      console.log('Error ==>', err);
      // throw err;
      return callback(err, null);
    }
    console.log(query);
    if (rows.length) {
      return callback(null, rows);
    } else {
      return callback(null, []);
    }
  });
}
Here mySQLconnection is the connection object you get back from mysql.createConnection({}).
After that, you can promisify the function and use the resulting promise like below:
var promisified = Promise.promisify(sqlGun);
promisified(query, {}).then( function() {} );

Auto-create mysql table with StrongLoop

I am trying to use StrongLoop with MySQL but cannot figure out how to migrate or automatically create tables in a MySQL database.
Is there at least a way to export the models into MySQL schemas, or do I have to create the tables manually?
I've been trying with the mysql demo app and going over the docs for a while, but no luck - http://docs.strongloop.com/display/DOC/MySQL+connector
Thanks!
I created /server/boot/autoupdate.js. It runs when the app boots. It loads "model-config" and "datasources" JSON and migrates or updates all models to the datasources defined for them.
// /server/boot/autoupdate.js
module.exports = function(app) {
  var path = require('path');
  var models = require(path.resolve(__dirname, '../model-config.json'));
  var datasources = require(path.resolve(__dirname, '../datasources.json'));

  function autoUpdateAll() {
    Object.keys(models).forEach(function(key) {
      if (typeof models[key].dataSource != 'undefined') {
        if (typeof datasources[models[key].dataSource] != 'undefined') {
          app.dataSources[models[key].dataSource].autoupdate(key, function (err) {
            if (err) throw err;
            console.log('Model ' + key + ' updated');
          });
        }
      }
    });
  }

  function autoMigrateAll() {
    Object.keys(models).forEach(function(key) {
      if (typeof models[key].dataSource != 'undefined') {
        if (typeof datasources[models[key].dataSource] != 'undefined') {
          app.dataSources[models[key].dataSource].automigrate(key, function (err) {
            if (err) throw err;
            console.log('Model ' + key + ' migrated');
          });
        }
      }
    });
  }

  //TODO: change to autoUpdateAll when ready for CI deployment to production
  autoMigrateAll();
  //autoUpdateAll();
};
You can simply migrate models by adding the following lines to your server.js file before the app.start method:
app.datasources['mySqlConnection'].automigrate(['orders', 'customers', 'User', 'ACL'], function(err) {
  console.log(err);
});
Add models to the array as per your needs.
Run the application with slc run.
Note: mySqlConnection is the connection name; replace it with your own connection name.
To update and/or create all mysql tables for your models:
var dataSource = app.dataSources.mysql;

dataSource.autoupdate(null, function (err) {
  if (err) return cb(err);
  return cb();
});
LoopBack calls it auto-migration. Check these links and search for that term:
Recipes for LoopBack Models, part 5 of 5: Model Synchronization with Relational Databases
Data sources and connectors
In my case, I manually created the MySQL tables and then created the models. For existing MySQL tables, I create the models with property names that match the MySQL field names.
So here are my steps in using StrongLoop LoopBack with a MySQL database:
Create the MySQL database and tables (or use an existing database).
Install the MySQL connector using npm install loopback-connector-mysql --save
Add your MySQL database details to the datasources.json file.
Create a model for each table using slc lb model tablename -i, OR edit the models.json file and add the properties manually. (Document: http://docs.strongloop.com/display/DOC/Creating+a+LoopBack+application#CreatingaLoopBackapplication-Creatingmodels)
Property names should be the same as the MySQL field names (more information on mapping MySQL to JSON data types: http://docs.strongloop.com/display/DOC/MySQL+connector#MySQLconnector-MySQLtoJSONtypes); a sketch of such a model definition follows below.
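For illustration, a model definition mapped onto an existing table might look like the sketch below. The Customer model, the customers table and the column names are hypothetical, and depending on your LoopBack version the definition lives in models.json or in common/models/customer.json; the per-property mysql block (columnName, dataType) is what ties a LoopBack property to an existing MySQL column.

{
  "name": "Customer",
  "base": "PersistedModel",
  "options": {
    "mysql": { "table": "customers" }
  },
  "properties": {
    "firstName": {
      "type": "string",
      "mysql": { "columnName": "first_name", "dataType": "varchar" }
    },
    "createdAt": {
      "type": "date",
      "mysql": { "columnName": "created_at", "dataType": "datetime" }
    }
  }
}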
On the same kind of issue, if you need the database itself to be created automatically, you can use the createDatabase option in your datasource JSON file:
"mysql": {
"host": "localhost",
"port": 0,
"database": "db",
"username": "root",
"password": "",
"name": "mysql",
"connector": "mysql",
"debug": false,
"createDatabase": true
}
This way you don't need to write the queries to create the database yourself.
Hope it helps.
jduhls's answer is beautiful, but I needed to tweak it slightly to add some static data into the tables. Here's my tweaked version, along with an example of loading data into a simple SystemSettings table (id, settingName, settingValue):
var async = require('async');

var SYSTEM_SETTINGS = [
  {
    "settingName": "mustPayInAdvance",
    "settingValue": "false"
  }
];

module.exports = function(app) {
  var path = require('path');
  var models = require(path.resolve(__dirname, '../model-config.json'));
  var datasources = require(path.resolve(__dirname, '../datasources.json'));
  var modelUpdates = [];

  function buildModelListForOperation() {
    Object.keys(models).forEach(function(key) {
      if (typeof models[key].dataSource != 'undefined') {
        if (typeof datasources[models[key].dataSource] != 'undefined') {
          modelUpdates.push({operation: app.dataSources[models[key].dataSource], key: key});
        }
      }
    });
  }

  function createStaticData() {
    app.models.SystemSettings.create(SYSTEM_SETTINGS, function(err, created) {
      if (err)
        throw err;
      else
        console.log('Sample data was imported.');
    });
  }

  function processModelsAndData(operationType) {
    buildModelListForOperation();

    // Create all models
    async.each(modelUpdates, function(item, callback) {
      item.operation[operationType](item.key, function (err) {
        if (err) throw err;
        console.log('Model ' + item.key + ' migrated');
        callback();
      });
    }, function (err) {
      if (err) throw err;
      createStaticData();
    });
  }

  //TODO: change to 'autoupdate' when ready for CI deployment to production
  processModelsAndData('automigrate');
};
I discovered an easy way to accomplish this task. The reference link is: Click Here
You can use prototype or not; in my case, I did not use it.
Per the documentation, you should use:
ds.autoupdate(models, function (error) {
  if (!error) {
    console.log("Updated models.");
  } else {
    console.log("An error has occurred: " + error);
  }
  ds.disconnect();
});
Where:
var path = require('path');
var app = require(path.resolve(__dirname, '../server/server'));
var ds = app.datasources.x;

where x is the datasource attribute name, for example in /server/datasources.json:
{
  "x": {
    "host": "localhost",
    "port": 3306,
    "database": "loopapp",
    "password": "",
    "name": "x",
    "user": "root",
    "connector": "mysql"
  }
}
Note (1): models can be a single model name string or an array of model-name strings.
Note (2): if you leave models out, all models in the file whose base attribute equals "PersistedModel" will be updated.
With that, I used it like this:
function autoupdate() {
  ds.autoupdate(function (error) {
    if (!error) {
      console.log("Updated all models.");
    } else {
      console.log("An error has occurred: " + error);
    }
    ds.disconnect();
  });
}
and then I called autoupdate();
You can put this code in a file.js and run it from the command line with node file.js.
If you want this file to run every time you start the program, put it at the /server/boot/file.js path.
Obviously, if you want to use automigrate instead, just replace autoupdate in the code above with automigrate.