I'm working on a user registration system for a website, but I am running into a few issues.
I'm trying to stay away from having to nest callbacks because it gets kind of messy. What I need help with is finding out whether there is a way to create synchronous queries with node-mysql.
Here's what I'm trying to achieve.
// NOTE(review): both queries are issued immediately — the INSERT below does NOT
// wait for this SELECT's callback. The `return res.send(error)` only exits the
// callback function; it cannot stop the second connection.query from running.
connection.query("select 1 as email from users where email = " + connection.escape(email), function(err, rows, fields) {
if(err) {
// Surface the MySQL error code/number to the client.
var error = {
error_message: err.code,
error_number: err.errno
};
return res.send(error);
}
// A row came back, so the address is already registered.
if(rows.length > 0) {
var error = {
message: 'Email Address is Taken',
code: 2
};
return res.send(error);
}
});
// NOTE(review): this runs regardless of the duplicate-email check above,
// because it is scheduled before the SELECT's callback has fired.
connection.query("insert into users (email, password) values ("+connection.escape(email)+", "+connection.escape(hash)+")", function(err, rows, fields) {
if(err) {
var error = {
error_message: err.code,
error_number: err.errno
};
return res.send(error);
}
});
My goal is to have the first query run and if that returns a row then to not execute the second query but if the first query returns 0 rows then continue and run the second query.
I know I can nest the second query inside the first query and put it in an else, but that's what I don't want to do because, besides those two queries, I also have it set up to use bcrypt to encrypt the password, which would have to be nested as well.
Is there a way to write it so that I don't need to nest the two queries or is nesting them going to be my only option?
You could simply use a module for node that provide synchronous functions.
Here you'll find a module that provide sync/async functions to deal with mysql.
https://github.com/Will-I4M/node-mysql-libmysqlclient
Here is how you could use it in order to execute a synchronous query :
// Load connection settings and open a synchronous MySQL connection.
var config = require("./config.json");
var mysql = require('mysql-libmysqlclient');
var connection = mysql.createConnectionSync(config.host, config.user, config.password, config.database);
// querySync blocks until MySQL answers; fetchAllSync materialises every row.
var resultHandle = connection.querySync("SELECT * FROM Users ;");
var rows = resultHandle.fetchAllSync();
console.log(JSON.stringify(rows));
As jfriend00 said above, if you're going to develop in node.js, then you MUST become comfortable with writing async code.
"chained promises" is probably your best bet:
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise/then
http://html5hive.org/node-js-quickies-working-with-mysql/
ADDENDUM:
This tutorial illustrates promise chaining with node.js SQL queries. It also discusses how you can use Q and/or Step to simplify your code:
http://code.tutsplus.com/tutorials/managing-the-asynchronous-nature-of-nodejs--net-36183
There could be conditions when you need sync queries (or at least for readability or simplicity). I do not agree with that everything have to be done in the async way at node.js.
I have tested a lot of available solutions and ended up with the "sync-mysql" module (https://github.com/ForbesLindesay/sync-mysql).
Easy to install and use, but not that good in performance (especially if you have to do a lot of sub-queries).
People talk about chained promises here, but give no example code. Here's what we did in a training session today to run a sequence of SQL statements synchronously using promises (credits to trainer and trainees), no additional libraries required:
// mysql (node-mysql, callback-based) connection; credentials redacted in the post.
let mysql = require("mysql");
let conn = mysql.createConnection({host: "localhost", user: "app",
password: "*******", database: "people"});
/**
 * Wrap conn.query in a Promise: resolves with the result set on success,
 * rejects with the driver error otherwise.
 */
function execSql(statement, values) {
return new Promise((resolve, reject) => {
conn.query(statement, values, (err, result) => {
err ? reject(err) : resolve(result);
});
});
}
// Inserts a user row, looks up its generated id, then inserts a friend row
// that references it — one SQL statement per promise-chain step.
// Returns the promise chain so callers can sequence on completion
// (the original dropped it, making the whole operation fire-and-forget).
function insertUserAndFriend(user, friend) {
return execSql("INSERT INTO usr (nam) VALUES (?);",[user])
.then(function (result) {
console.log("Inserted " + user);
return execSql("SELECT id, nam from usr where nam = ?;", [user]);
})
.then((result) => {
// result is an array of rows; nam is UNIQUE, so row 0 is the new user.
let { id, nam } = result[0];
console.log("Result: " + id + " " + nam);
return execSql("INSERT INTO friend (usr,nam) VALUES (?,?);",
[id, friend]);
})
.then((result) => {
console.log("Inserted " + friend);
})
.catch((err) => {
console.log("Error: " + err);
})
.finally(function () {
// finally callbacks receive no arguments (the original declared a
// misleading `res` parameter) — close the connection either way.
conn.end();
});
}
// Connect first; the insert sequence runs only after a successful connect.
conn.connect(function (err) {
if (err) throw err;
insertUserAndFriend("Bonnie", "Clyde");
});
For reference, here is the create.sql of the toy database:
-- Toy schema: drop the child table before the parent to satisfy the FK.
DROP TABLE IF EXISTS friend;
DROP TABLE IF EXISTS usr;
-- Users: auto-increment surrogate key, unique name.
CREATE TABLE usr (
id INT unsigned NOT NULL AUTO_INCREMENT,
nam VARCHAR(50) UNIQUE NOT NULL,
PRIMARY KEY (id)
);
-- Friends: each row references the owning usr row by id.
CREATE TABLE friend (
usr INT unsigned NOT NULL,
FOREIGN KEY (usr) REFERENCES usr (id),
nam VARCHAR(50) UNIQUE NOT NULL
);
For most things I code in node.js, I like asynchronous code. However, I completely understand that asynchronous code is extremely and dangerously incompatible with the need to write and maintain business logic. I've used a variety of alternative methods. The modules to make things synchronous still leave you with data scoping issues that complicate things. Promises worked best for me. Using that approach, I found myself practically writing an interpreter for a new language on top of JavaScript. It may seem absurd, but the most practical and safest method for me ended up being to use the shelljs module and the mysql shell client. It's not great for execution performance, but it makes for much better developer performance and keeps business logic clear and orderly, as is crucial for business logic. Here's a snippet of code to give an example of some of what I created:
var shell = require('shelljs');
module.exports = {
user: '',
password: '',
runSql: function (sql) {
var command = "echo '" + sql.replace(/'/g, "'\\''") + "' | mysql -u" + this.user.replace(/'/g, "'\\''") + " -p'" + this.password.replace(/'/g, "'\\''") + "'";
var raw = shell.exec(command, {silent: true}).stdout;
//console.log( 'BASH -> MySQL YIELD: "' + raw + '"' );
if (raw.substr(0, 5) === 'ERROR') {
console.log('ERROR Resulting from: ' + sql + '\n' + raw);
return [];
}
var rows = raw.split('\n');
var names = [];
for (var r = 0; r < rows.length; r += 1) {
columns = rows[r].split('\t');
// Capture column names
if (r === 0) {
names = columns;
continue;
}
// Reformat row into named valued
var fields = {};
for (var c = 0; c < columns.length; c += 1) {
fields[names[c]] = columns[c];
}
rows[r] = fields;
}
// Eliminate extraneous first and last rows
rows.splice(0, 1);
rows.splice(rows.length - 1, 1);
return rows;
},
}
Simplest solution I could find is the sync-sql module.
Install the required modules
npm install sync-sql
npm install sync-mysql
Sample index.js
// sync-mysql exposes a blocking query() API — no callbacks required.
const Mysql = require('sync-mysql')
const connection = new Mysql({
  host: 'localhost',
  user: 'root',
  password: 'password',
  database: 'demo'
})
// The call blocks until MySQL replies, so the result is usable immediately.
const result = connection.query('SELECT NOW()')
console.log(result)
https://www.geeksforgeeks.org/how-to-run-synchronous-queries-using-sync-sql-module-in-node-js/
I know I am late to this party but I feel I can help people like me that needed a way to use MySQL in a synchronous way.
Answer is here.
Oh and I had to add a pool.end(); after my query code to close the connection and stop the infinite wait loop. See here.
Related
Every Dialogflow chatbot is associated with a project. There are apparently no limits for the number of chatbots that could be created. But the resources that they may use, like fulfillment functions, are billed. Working with a trial account in GCP I can have only 3 billable projects (unless I ask for an upgrade). But nothing denies the use of resources like functions or datastore that are hosted by a billable account by another project. So, I am trying to use an fulfillment function for a chatbot hosted in one of these billable projects. There's no problem. I can invoke the function and it works. But I need to make a query in datastore and it does not work in this context. I ran the same code in another function in the same project and the query results are ok. But the query does not run inside an intent mapped function (function topic(agent) {...}).
Here are the excerpts for these two functions:
(1) Returns query results
exports.helloWorld = (req, res) => {
const query = datastore
.createQuery('Synonym')
.filter('synonym', "annual salary");
datastore.runQuery(query, (err, entities, info) => {
// entities = An array of records.
// Access the Key object for an entity.
var message = "<h3>Synonyms for ANNUAL SALARY</h3>";
for (const entity of entities) {
const entityKey = entity[datastore.KEY];
message += entityKey.name + " is synonym of " + entity["synonym"] + "</br>";
}
res.status(200).send(message);
});
};
returns:
Synonyms for ANNUAL SALARY
annual is synonym of annual salary
annual salary is synonym of annual salary
...
(2) Does not return results
exports.HRChatbotFulfillment = functions.https.onRequest((request, response) => {
const agent = new WebhookClient({ request, response });
...
// NOTE(review): datastore.runQuery is asynchronous — its callback fires
// AFTER agent.add(message) below has already run, so the synonyms never
// make it into the reply. The async/await rewrite in the UPDATE fixes this.
function topic(agent) {
const topic = agent.parameters['topic'];
var message = "Synonyms for " + topic + "\n";
const query = datastore.createQuery('Synonym').filter('synonym', topic);
datastore.runQuery(query, (err, entities, info) => {
// entities = An array of records.
// Access the Key object for an entity.
for (const entity of entities) {
const entityKey = entity[datastore.KEY];
message += entityKey.name + " is synonym of " + entity["synonym"] + "\n";
}
});
// Runs immediately, before the query callback has appended anything.
agent.add(message);
}
returns:
"Synonyms for annual salary"
Fulfillment status
Webhook execution successful
I will appreciate any suggestions.
Thank you.
UPDATE
It takes me some time to figure out how to deal with this asynchronous calls but I've got it:
// Promisified wrapper around datastore.runQuery: resolves with
// [entities, info] on success, rejects with the query error.
const runQuery = (query) => new Promise((resolve, reject) => {
datastore.runQuery(query, (err, entities, info) => {
if (err) {
reject(err);
return;
}
resolve([entities, info]);
});
});
// Intent handler: awaits the Datastore query so the reply actually
// contains the synonym list before agent.add() runs.
async function topic(agent) {
const topic = agent.parameters['topic'];
let message = "Synonyms for " + topic.toUpperCase() + "\r\n";
const query = datastore
.createQuery('Synonym')
.filter('synonym', topic);
try {
const [entities, info] = await runQuery(query);
for (const entity of entities) {
const entityKey = entity[datastore.KEY];
message += entityKey.name + "\r\n";
}
} catch (err) {
agent.add("Err: " + err);
}
agent.add(message);
}
Thank you all for your attention.
Quite the odd issue here.. I think this may be more of a problem of debugging, however I'm going to post in-case it is truly an issue and I'm quite frankly at my wits end anyway. I am doing a basic React.js/next.js form that takes a few inputs and adds them to state, then using axios sends the update to the api, which then makes a query insert or update to MySQL. The problem is, this Insert/Update doesn't work and I can't get any error output besides generic ETIMEDOUT from time to time, which I'm not even sure are related. I had this fixed before but am still unsure what I did. ALL other queries on the site work fine, the connection to the MySQL (AWS RDS) database is just fine.
My theories are A) the final query syntax has a silly issue causing this to just get lost in the abyss, or B) there's some server side code trying be run client side that I don't quite understand. (have also gotten the module 'fs' not found), or C) an async issue that I am not weathered enough in next.js to fix. And before you say it, yes there is data to be updated in the table, it is not trying to update the same data and thus bypassing the update. It is new data, every time I test.
NOTE-- I should also say, this code works PERFECT on my local osx environment. This ONLY happens when I try to run this on my Vercel deployment environment. This is important to know. The Database and Code are the EXACT same between both environments.
Without further ado, some code:
To save code display, lets assume our values are in state and ready to go to the API, as I know for a fact they are, and they make it to the actual query.
handleSubmit - gets run when the form is submitted.
// Form submit: posts the current segment items (as an array of value
// arrays) to the API for insert/update, then posts the original ID list
// for deletion, and alerts on success.
const handleSubmit = (e) => {
e.preventDefault();
originalSegmentItemList = originalSegmentItemList.join(',')
// Build the array-of-arrays for the batch INSERT directly with map's
// return value (the original used map purely for side effects and also
// declared an unused segmentItemIDList).
const segmentItemArray = segmentItemState.map((item) => [
item.segmentID,
Number(item.chronologicalOrder),
Number(item.releaseOrder),
item.name,
item.typeID
]);
let action = 'updatesegmentitem'
axios.post('/api/list', { action, segmentItemArray })
.then(() => {
action = 'deletesegmentitem'
// Return the second request so it is chained, not nested.
return axios.post('/api/list', { action, originalSegmentItemList })
})
.then(() => {
alert("Updated!!");
})
.catch(error => console.error('Error:', error));
}
api/list (assume it gets into this block, because it does)
// Handles the 'updatesegmentitem' action (fragment of the API route handler).
else if(req.body.action == 'updatesegmentitem') {
console.log("2. API updatesegmentitem req.body: ", req.body);
const segmentItemArray = req.body.segmentItemArray;
console.log("SegmentItemArray: ", segmentItemArray);
try {
// Only attempt the batch insert when there is actually data to insert.
if(Array.isArray(segmentItemArray) && segmentItemArray.length > 0) {
console.log("Inside IsArray: ", segmentItemArray);
const segmentItemInsertResults = await insertBatchSegmentItems(segmentItemArray);
res.send(segmentItemInsertResults);
} else {
res.send(true);
}
} catch (e) {
// NOTE(review): the client only ever sees the string 'error'; the real
// exception is swallowed here — consider logging `e` server-side.
res.send('error');
}
insertBatchSegmentItems (mysql query) .. Sometimes I get the console logs in here, sometimes not..
/**
 * Batch-inserts segment items and resolves with the driver result (or the
 * error object on failure).
 *
 * The original awaited the callback-style mysql query — which returns a
 * Query object, not a promise — so the callback's result/error were lost
 * and the function returned `true` before the insert finished. It also
 * never closed the connection, which leaves a serverless invocation
 * (e.g. on Vercel) hanging until ETIMEDOUT.
 */
export async function insertBatchSegmentItems(segmentItemData) {
const mysqlConnection = mysql.createConnection({
host: process.env.MYSQL_HOST,
database: process.env.MYSQL_DATABASE,
user: process.env.MYSQL_USER,
password: process.env.MYSQL_PASSWORD,
debug: false,
});
mysqlConnection.connect();
const insertSQL = 'INSERT INTO segmentItem (segmentID, chronologicalOrder, releaseOrder, name, typeID) VALUES ?'
try {
// Promisify the callback API so await genuinely waits for the insert.
const result = await new Promise((resolve, reject) => {
mysqlConnection.query(insertSQL, [segmentItemData], function(err, result) {
console.log("Connex Query Inside Result: ", result);
if (err) reject(err);
else resolve(result);
});
});
return result;
} catch (e) {
console.log("ERROR: ", e);
return e;
} finally {
// Always release the connection so the function can exit cleanly.
mysqlConnection.end();
}
}
Please excuse my mess, I have been trying so many different things to try and get this to work but it will be cleaned up after a solution has been found.
Whenever I run into similar situations, I usually drop out exception handling and let it fail hard. It might give you a better insight of where it's happening. Good luck!
I am using this approach to upload images to aws s3 bucket:
https://grokonez.com/aws/angular-4-amazon-s3-example-how-to-upload-file-to-s3-bucket
This works fine as an individual task, but I rely on the result, which arrives a bit later due to the async behavior. I would like the next task to be executed only after the upload is confirmed.
// Uploads each selected file to S3 under a fresh UUID-based key.
// NOTE(review): uploadservice.uploadfile is asynchronous, so the 'done'
// log below fires before any upload has actually completed — await or
// subscribe to a returned promise/observable to sequence follow-up work.
upload() {
let file: any;
// let urltype = '';
let filename = '';
// let b: boolean;
for (let i = 0 ; i < this.urls.length ; i++) {
file = this.selectedFiles[i];
// urltype = this.urltype[i];
filename = file.name;
// Preserve the original extension, replace the name with a UUID.
const k = uuid() + '.' + filename.substr((filename.lastIndexOf('.') + 1));
this.uploadservice.uploadfile(file, k);
console.log('done');
// console.log('file: ' + file + ' : ' + filename);
// let x = this.userservice.PostImage('test', file);
// console.log('value of ' + x);
}
// return b;
}
fileupload service:
// NOTE(review): this mixes the callback API with .promise() — the values
// returned inside the callback are discarded by the SDK, and the promise
// produced by .promise() is never returned to the caller, so nothing can
// await the upload's completion.
bucket.upload(params, function (err, data) {
if (err) {
console.log('There was an error uploading your file: ', err);
return false;
}
console.log('Successfully uploaded file.', data);
return true;
}).promise();
}
Here, done is getting executed before the file upload is done.
I think you should check out a tutorial for asynchronous programming and try to play around with couple of examples using simple timeouts to get the hang of it and then proceed with more complex things like s3 and aws.
Here is how I suggest you start your journey:
1) Learn the basic concepts of asynchronous programming using pure JS
https://eloquentjavascript.net/11_async.html
2) Play around with your own examples using callbacks and timeouts
3) Replace the callbacks with Promises
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Using_promises
4) Do it the "angular" way with rxjs Observables (similar to JS Observable)
http://reactivex.io/rxjs/class/es6/Observable.js~Observable.html
PS: To be more concrete:
Your code fails because the following line is executed in an asynchronous manner. Thus the code will call your uploadfile function and will immedietly continue executing without waiting.
this.uploadservice.uploadfile(file, k);
Once you follow all the points I described above you will be able to do something like this (using a Promise):
this.uploadservice.uploadfile(file, k)
.then( result => {
console.log('Upload finished');
})
.catch(error => {
console.log('Something went wrong');
});
I'm pretty new to the node world and trying to migrate our php application to node. To be able to return all article data several different queries have to be done depending on the results of the first query. Currently my data object is empty as it's returned before the two queries run. How can I "chain" these queries using a promised based approach.
I found a library https://github.com/lukeb-uk/node-promise-mysql which I think could help but I have no idea how to implement it with my code.
// Fetches article data plus dependent rows.
// NOTE(review): both queries are asynchronous, so `data.article` is still
// {} when the `if` below runs, and done(data) fires before any query
// callback — see the promise-chain answer for the corrected flow.
exports.getArticleData = function(req, done) {
pool.getConnection(function(error, connection) {
if (error) throw error;
var data = {
article: {},
listicles: []
};
// Initial query
// SECURITY NOTE: req.params.articleId is interpolated directly into the
// SQL string — use a parameterized query instead.
connection.query(
`SELECT article_id, title, is_listicle_article, FROM com_magazine_articles AS article WHERE article_id = ${req
.params.articleId}`,
function(error, results) {
data.article = results;
}
);
// This query should only be executed if is_listicle_article = true
// NOTE(review): data.article is still the initial {} here, because the
// first callback has not run yet — this branch never fires as intended.
if (data.article.is_listicle_article) {
connection.query(
`SELECT * FROM com_magazine_article_listicles WHERE article_id = ${req.params
.articleId}`,
function(error, results) {
data.listicle = results;
}
);
}
// More queries depending on the result of the first one
// ....
// ....
// Callback with the data object
// NOTE(review): runs synchronously, before any query has completed.
done(data);
connection.release();
});
};
What would be the best approach to execute queries based on other queries results? Any help is really appreciated.
The functionality you are looking for is Promise chaining, it allows you to construct a sequence of promises, each depending on the result of the previous value. Applying this to your code, you would get something like this:
exports.getArticleData = function(req, done) {
pool.getConnection(function(error, connection) {
if (error) throw error;
// Inital query
return connection.query(
`SELECT article_id, title, is_listicle_article, FROM com_magazine_articles AS article WHERE article_id = ${req
.params.articleId}`
).then((rows) => {
return Promise.all(rows.map((article) => {
if (article.is_listicle_article) {
return connection.query(
`SELECT * FROM com_magazine_article_listicles WHERE article_id = ${req.params
.articleId}`
);
} else {
return Promise.resolve(null);
}
}));
}).then((res) => {
connection.release();
done(res.filter(function(i){ return i != null; }));
})
// This query should only be excuted if is_listicle_article = true
// More queries depending on the result of the first one
// ....
// ....
// Callback with the data object
connection.release();
});
};
Obviously since I don't have all of your code, I couldn't verify this example, but this should be roughly the functionality you are looking for. That said, I think there were a couple of mistakes you should watch out for in your example code:
connection.query() returns a promise (aka doesn't need a callback function). Use this functionality to your advantage- it will make your code prettier.
connection.query() returns an array of rows, not a single value. You seemed to ignore this in your example code.
Try not to save things into a variable when using promises, it isn't necessary. To remedy this, read more into the Promise API (Promise.resolve(), Promise.reject(), Promise.any(), Promise.catch(), Promise.all()) etc.
It seems like these SQL queries could easily be combined into a single query. This will be way more efficient that performing two operations. Not sure if this is the case with the remaining queries you wish to use, but definitely something to look out for.
I'm using sails 0.11.2. With the latest sails-mongo adapter.
I have a very large database (gigabytes of data) of mainly timestamp and values. And i make queries on it using the blueprint api.
If I query using localhost:1337/datatable?limit=100000000000 the nodejs hangs on 0.12 with a lot of CPU usage, and crashes on v4. It crashes on the toJSON function.
I've finded out that i need to make multiple queries on my API. But I don't how to proceed to make it.
How can i make multiple queries that "don't explode" my server?
Update:
On the newer version 0.12.3 with the latest waterline and sails-mongo, the queries go much more smoothly. The crashes in the cloud happened because I didn't have enough RAM to handle sails.js and mongodb on the same T2.micro instance.
I've moved the mongodb server to an M3.Medium instance. Now the server doesn't crash anymore, but it freezes. I'm using skip/limit and it works nicely for sails.js, but for mongodb it is a great waste of resources!
MongoDB makes an internal query using limit = skip + limit, then moves the cursor to the desired data and returns it. When you do a lot of pagination you are issuing lots of these internal queries, and the query size keeps increasing.
As this article explains, the way to get around the waste of resources in MongoDB is to avoid using skip and cleverly use _id as part of your query.
I did not use sails mongo but I did implement the idea above by using mongo driver in nodejs:
/**
* Motivation:
* Wanted to put together some code that used:
* - BlueBird (promises)
* - MongoDB NodeJS Driver
* - and paging that did not rely on skip()
*
* References:
* Based on articles such as:
* https://scalegrid.io/blog/fast-paging-with-mongodb/
* and GitHub puclic code searches such as:
* https://github.com/search?utf8=%E2%9C%93&q=bluebird+MongoClient+_id+find+limit+gt+language%3Ajavascript+&type=Code&ref=searchresults
* which yielded sample code hits such as:
* https://github.com/HabitRPG/habitrpg/blob/28f2e9c356d7053884107d90d04e28dde75fa81b/migrations/api_v3/coupons.js#L71
*/
// Paging demo dependencies: bluebird promises + the native MongoDB driver.
var Promise = require('bluebird'); // jshint ignore:line
var _ = require('lodash');
var MongoClient = require('mongodb').MongoClient;
// Kept module-level so the catch block can close the connection on failure.
var dbHandleForShutDowns;
// option a: great for debugging
var logger = require('tracer').console();
// option b: general purpose use
//var logger = console;
//...
/**
 * Recursively pages through `collectionName`, `pageSize` documents at a
 * time, without skip(): each page sorts on _id and the next page filters
 * on {_id: {$gt: <last seen _id>}} (mutating `query` between pages).
 *
 * `processPage(documents)` must return a promise; paging continues only
 * after it resolves. The returned promise resolves once every page has
 * been processed.
 */
var getPage = function getPage(db, collectionName, query, projection, pageSize, processPage) {
//console.log('DEBUG', 'filter:', JSON.stringify(query,null,2));
// Make sure _id is always projected — it drives the paging cursor.
// (The original one-liner assigned the *value* of `projection['_id']=true`,
// i.e. the boolean true, back onto `projection`, destroying the object.)
projection = projection || {};
projection['_id'] = true;
return db
.collection(collectionName)
.find(query)
.project(projection)
.sort({'_id':1}).limit(pageSize)
.toArray() // cursor methods return promises: http://mongodb.github.io/node-mongodb-native/2.1/api/Cursor.html#toArray
.then(function processPagedResults(documents) {
if (!documents || documents.length < 1) {
// stop - no data left to traverse
return Promise.resolve();
}
if (documents.length < pageSize) {
// stop - last page
return processPage(documents);
}
return processPage(documents) // process the results of the current page
.then(function getNextPage(){ // then go get the next page
var last_id = documents[documents.length-1]['_id'];
query['_id'] = {'$gt' : last_id};
return getPage(db, collectionName, query, projection, pageSize, processPage);
});
});
};
//...
// Entry point (fragment): connect with bluebird as the promise library,
// page through the collection 5 documents at a time, and always close the
// connection — via finally on success, or the saved handle on error.
return MongoClient
.connect(params.dbUrl, {
promiseLibrary: Promise
})
.then(function(db) {
dbHandleForShutDowns = db;
return getPage(db, collectionName, {}, {}, 5, function processPage(pagedDocs){console.log('do something with', pagedDocs);})
.finally(db.close.bind(db));
})
.catch(function(err) {
console.error("ERROR", err);
dbHandleForShutDowns.close();
});
The following two sections show how the code manipulates _id and makes it part of the query:
.sort({'_id':1}).limit(pageSize)
// [...]
var last_id = documents[documents.length-1]['_id'];
query['_id'] = {'$gt' : last_id};
Overall code flow:
Let getPage() handle the work, you can set the pageSize and query to your liking:
return getPage(db, collectionName, {}, {}, 5, function processPage(pagedDocs){console.log('do something with', pagedDocs);})
Method signature:
var getPage = function getPage(db, collectionName, query, projection, pageSize, processPage) {
Process pagedResults as soon as they become available:
return processPage(documents) // process the results of the current page
Move on to the next page:
return getPage(db, collectionName, query, projection, pageSize, processPage);
The code will stop when there is no more data left:
// stop - no data left to traverse
return Promise.resolve();
Or it will stop when working on the last page of data:
// stop - last page
return processPage(documents);
I hope this offers some inspiration, even if its not an exact solution for your needs.
1. run aggregate
// Build a native MongoDB aggregate pipeline from a sails/waterline criteria
// and collect the matching primary keys into `ids` (declared elsewhere).
const SailsMongoQuery = require('sails-mongo/lib/query/index.js')
const SailsMongoMatchMongoId = require('sails-mongo/lib/utils.js').matchMongoId
const fn = model.find(query).paginate(paginate)
const criteria = fn._criteria
const queryLib = new SailsMongoQuery(criteria, {})
// Everything except `where` (sort/limit/skip) becomes its own $-stage below.
const queryOptions = _.omit(queryLib.criteria, 'where')
const where = queryLib.criteria.where || {}
// Convert id-shaped string values into real ObjectIDs for the $match stage.
const queryWhere = Object.keys(where).reduce((acc, key) => {
const val = where[key]
acc[key] = SailsMongoMatchMongoId(val) ? new ObjectID(val) : val
return acc
}, {})
const aggregate = [
{ $match: queryWhere }
].concat(Object.keys(queryOptions).map(key => ({ [`$${key}`]: queryOptions[key] })))
// console.log('roge aggregate --->', JSON.stringify(aggregate, null, 2))
model.native((err, collection) => {
if (err) return callback(err)
// allowDiskUse lets large sort stages spill to disk instead of failing.
collection.aggregate(aggregate, { allowDiskUse: true }).toArray(function (err, docs) {
if (err) return callback(err)
// Waterline exposes the Mongo _id as `id`; map back for the raw driver.
const pk = primaryKey === 'id' ? '_id' : primaryKey
ids = docs.reduce((acc, doc) => [...acc, doc[pk]], [])
callback()
})
})
2. run sails find by id`s
// Step 2: re-query through sails by the ids collected from the aggregate,
// so populate() and other waterline features still apply to the results.
query = Object.assign({}, query, { [primaryKey]: ids }) // check primary key in sails model
fn = model.find(query) // .populate or another method
fn.exec((err, results) => { console.log('result ->>>>', err, results) })