MySQL super slow query after 3 attempts - mysql

I am making a Discord level bot; the bot inserts a random amount of XP each time a user types a message in the chat. To see a user's level I have a !level command, like this:
sql.query(`SELECT * FROM WMembers WHERE DiscordID = ${message.author.id}`, (err, rows) => {
    if (err) console.log(err)
    if (!rows[0]) return message.channel.send("The user has no XP!")
    let xp = rows[0].XP
    let level = rows[0].Level
    let nextLevel = level * 40
    message.channel.send(`**Level: **${level - 1}\n**Points: **${xp} / ${nextLevel}`)
})
However, when I call the command more than 2-3 times, the queries start executing extremely slowly, taking 5 minutes to finally return the value.
Here is my SQL code:
const pool = mysql.createPool({
    host    : keys.dbHost,
    port    : 3306,
    user    : keys.dbUser,
    password: keys.dbPass,
    database: keys.dbName
});

let sql = {};

sql.query = function(query, params, callback) {
    pool.getConnection(function(err, connection) {
        if (err) {
            if (callback) callback(err, null, null);
            return;
        }
        connection.query(query, params, function(error, results, fields) {
            connection.release();
            if (error) {
                if (callback) callback(error, null, null);
                return;
            }
            if (callback) callback(false, results, fields);
        });
    });
};
If someone can help me, I will greatly appreciate it. Thank you.

You clearly don't have enough memory for your database: https://dev.mysql.com/doc/mysql-monitor/4.0/en/system-prereqs-reference.html
MySQL's minimum requirements call for 2 GB of memory; you won't get far with 128 MB. On that note, as @ExploitFate already advised, limiting the number of connections your application can make to the database will also save you some memory.
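If memory really is the constraint, capping the pool size is a one-line change to the pool you already have. A minimal sketch (the connectionLimit of 5 is an assumption to tune for your server; the mysql driver defaults to 10):
const mysql = require('mysql');

// Keep the pool small so the bot can never open more connections
// than a low-memory MySQL server can serve at once.
const pool = mysql.createPool({
    connectionLimit: 5,   // assumed value, adjust for your server
    host    : keys.dbHost,
    port    : 3306,
    user    : keys.dbUser,
    password: keys.dbPass,
    database: keys.dbName
});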

Related

How to handle a full pool of mysql connections in nodejs

In my Node script I use MySQL and to be able to handle multiple connections I use a connection pool.
Today I forgot to release a connection in the mysql pool. It took me a long time to figure out what the problem was because there was no error shown anywhere.
My code:
const mysql = require('mysql');
const pool = mysql.createPool({
    host     : 'x',
    user     : 'x',
    password : '#x',
    database : 'x',
    connectionLimit: 2
});

function executeQuery() {
    pool.getConnection((err, connection) => {
        if (err) console.log(err);
        let query = mysql.format("SELECT * FROM users WHERE id = ?", 1);
        connection.query(query, (err, rows) => {
            if (err) console.log(err);
            console.log(rows);
        });
    });
}
executeQuery(); // outputs the user as expected
executeQuery(); // outputs the user as expected
executeQuery(); // there is no output in the console, it just looks like nothing happened
My question: how do I find out whether there are still connections available, and if no connections are available anymore, show an error or handle it in a different way?
You forgot to release your connection:
function executeQuery() {
    pool.getConnection((err, connection) => {
        if (err) console.log(err);
        connection.query("SELECT * FROM users WHERE id = ?", [ 1 ], (err, rows) => {
            connection.release(); // Give it back or else it gets lost
            if (err) console.log(err);
            console.log(rows);
        });
    });
}
There's also no reason to grab the connection like that; you can just use the pool directly:
function executeQuery() {
    pool.query("SELECT * FROM users WHERE id = ?", [ 1 ], (err, rows) => {
        if (err) console.log(err);
        console.log(rows);
    });
}
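As for the original question of detecting when the pool is exhausted: the mysql pool emits events you can log, and you can also set acquireTimeout on the pool so a stuck getConnection eventually fails with an error instead of waiting forever. A minimal sketch (the log messages are only illustrative):
// Fires whenever a request has to wait because every connection
// in the pool is already checked out.
pool.on('enqueue', () => {
    console.log('Waiting for an available connection slot');
});

// Fires each time a connection is handed out from the pool.
pool.on('acquire', (connection) => {
    console.log('Connection %d acquired', connection.threadId);
});

// Fires each time a connection is returned to the pool.
pool.on('release', (connection) => {
    console.log('Connection %d released', connection.threadId);
});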

Node.js Mysql stopped querying after a few minutes

I have a Discord.js bot with MySQL as its database. The problem I'm having is that the SQL stops querying after a random number of times; the only way I can fix it is by restarting the Node.js app.
My bot involves a lot of SQL querying inside another SQL query, similar to:
sql.query(`SELECT xxxxx`, (err, res) => {
    sql.query(`SELECT xxxxx`, (err, result) => {})
})
And my SQL pool code is:
const mysql = require('mysql');

const pool = mysql.createPool({
    host    : "localhost",
    port    : 3306,
    user    : "x",
    password: "x",
    database: 'x'
});

let sql = {};

sql.query = function(query, params, callback) {
    pool.getConnection(function(err, connection) {
        if (err) {
            if (callback) callback(err, null, null);
            return;
        }
        connection.query(query, params, function(error, results, fields) {
            connection.release();
            if (error) {
                if (callback) callback(error, null, null);
                return;
            }
            if (callback) callback(false, results, fields);
        });
    });
};

module.exports = sql;
My VPS is running fine, and so is my SQL server; I'm not sure what's causing the problem.
My current workaround is a cron job that restarts the application every 30 minutes, but I'm not sure whether that is good practice.

Lambda NodeJS MySQL Task Timed out

I am trying to learn how to connect to MySQL from Lambda functions in AWS. I have followed a couple of guides online and basically ended up with this code:
var mysql = require('mysql');

var pool = mysql.createPool({
    connectionLimit : 1000,
    connectTimeout  : 60 * 60 * 1000,
    acquireTimeout  : 60 * 60 * 1000,
    timeout         : 60 * 60 * 1000,
    host: "foo-bar-123.us-east-2.rds.amazonaws.com",
    user: "root",
    password: "pass123",
    database: "sample_db",
});

exports.handler = (event, context, callback) => {
    // prevent timeout from waiting event loop
    context.callbackWaitsForEmptyEventLoop = false;
    pool.getConnection(function(err, connection) {
        if (err) throw err;
        // Use the connection
        connection.query('SELECT id FROM customer limit 10;', function (error, results, fields) {
            // And done with the connection.
            connection.release();
            // Handle error after the release.
            if (error) callback(error);
            else callback(null, results);
        });
    });
};
This works on my local machine, but when I zip this code and upload it as a Lambda function, it returns:
Response:
{
    "errorMessage": "2018-11-13T02:16:10.339Z 0562b432-e6ea-11e8-81ef-bd64aa1af0a4 Task timed out after 30.03 seconds"
}
It times out no matter how many seconds I set it to.
I have pretty much left everything at the defaults since I am new to all of this, but I have added AmazonRDSFullAccess to the Lambda function's role.
Does anyone have any idea what may be wrong or missing in my setup?
Thanks.
After some trial and error, I was able to make it work. What I was missing was that I had not allowed All TCP in the inbound rules of my RDS security group. After that, I set my Lambda function to No VPC, and it was able to query properly.
This link: https://dzone.com/articles/querying-rds-mysql-db-with-nodejs-lambda-function and the Stack Overflow link posted in there (which is this: AWS Lambda RDS connection timeout) helped me a lot in figuring out what was wrong with my code/setup.
Here is the final code that I ended up using.
const mysql = require('mysql');

const pool = mysql.createPool({
    host: "foo-bar-123.us-east-2.rds.amazonaws.com",
    user: "root",
    password: "pass123",
    database: "sample_db"
});

exports.handler = (event, context, callback) => {
    // prevent timeout from waiting event loop
    context.callbackWaitsForEmptyEventLoop = false;
    pool.getConnection((err, connection) => {
        if (err) throw err;
        // Use the connection
        connection.query('SELECT id FROM customer limit 10;', (error, results, fields) => {
            // And done with the connection.
            connection.release();
            // Handle error after the release.
            if (error) callback(error);
            else callback(null, results);
        });
    });
};
Thanks!
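One further tweak worth considering, sketched below: throw err inside an asynchronous callback surfaces as an uncaught exception rather than a clean Lambda error response, so forwarding the error to the Lambda callback is usually safer. This is the same handler with only the connection-error branch changed:
exports.handler = (event, context, callback) => {
    // prevent timeout from waiting event loop
    context.callbackWaitsForEmptyEventLoop = false;
    pool.getConnection((err, connection) => {
        if (err) {
            // Report the failure through the Lambda callback instead of
            // throwing, so the invocation ends with a proper error response.
            callback(err);
            return;
        }
        connection.query('SELECT id FROM customer limit 10;', (error, results) => {
            connection.release();
            if (error) callback(error);
            else callback(null, results);
        });
    });
};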

Store mysql query rows in variable for later use

I'm doing a monitoring system project in which Arduino sensor data is sent to a Node.js server (through GET requests) and then stored in a MySQL DB.
Whenever I successfully send data to the server, it connects to the MySQL DB and queries the last 5 received records to do some processing.
Therefore, I need to store the rows of those 5 records in a variable for later use, meaning that I have to get the rows from a connection.query into a variable.
I read that the reason I'm not able to do this is that Node.js is asynchronous. So my questions are:
Is it possible to do the described tasks the way I'm trying?
If not, is there any other way to do so?
I'm not putting the whole code here, but I'm running a separate test that also doesn't run properly. Here it is:
var mysql = require('mysql');

var con = mysql.createConnection({
    host    : "127.0.0.1",
    user    : "root",
    password: "xxxx",
    database: "mydb",
    port    : 3306
});

var queryString = "SELECT id, temp1, temp2, temp3, temp4, level_ice_bank, flow FROM tempdata ORDER BY id DESC LIMIT 5";

con.connect(function(err) {
    if (err) throw err;
});

var result_arr = [];
function setValue(value) {
    result_arr = value;
}

con.query(queryString, function (err, rows, fields) {
    if (err) throw err;
    else {
        //console.log(rows);
        setValue(rows);
    }
});

console.log(result_arr);
It logs:
[]
But if I uncomment console.log(rows); it logs what I need to store in the variable result_arr.
Thanks in advance to all.
You're seeing this behaviour because con.query(...) is an asynchronous function. That means that:
console.log(result_arr);
Runs before:
con.query(queryString, function (err, rows, fields) {
    if (err) throw err;
    else {
        //console.log(rows);
        setValue(rows);
    }
});
(Specifically, the setValue(rows) call)
To fix this in your example, you can just do:
con.query(queryString, function (err, rows, fields) {
    if (err) throw err;
    else {
        setValue(rows);
        console.log(result_arr);
    }
});
If you want to do more than just log the data, then you can call a function which depends on result_arr from the con.query callback, like this:
con.query(queryString, function (err, rows, fields) {
    if (err) throw err;
    else {
        setValue(rows);
        doCleverStuffWithData();
    }
});

function doCleverStuffWithData() {
    // Do something with result_arr
}
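If you do want the rows available in a plain variable outside the callback, one option is to wrap the query in a Promise and await it. A minimal sketch built on the con and queryString from the question (the function name getLastFive is just illustrative):
// Wrap con.query in a Promise so callers can await the rows.
function getLastFive() {
    return new Promise((resolve, reject) => {
        con.query(queryString, (err, rows) => {
            if (err) return reject(err);
            resolve(rows);
        });
    });
}

async function main() {
    const result_arr = await getLastFive();
    console.log(result_arr); // the five rows are available here
}

main().catch(console.error);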

Node.JS and MySQL - queries lock up and execute extremely slowly

I am getting strange behavior using Node.JS and MySQL with this driver - https://github.com/mysqljs/mysql
Essentially, I have a button on the frontend that triggers an app.get that makes a query in the database and I can happily use the results in my backend.
This works nicely until I press the button 4-5 times in a second, at which point the queries lock up and I have to wait 2-3 minutes until they continue executing. I have a similar write function that behaves the same way.
Is it possible that this is a problem because I'm trying to execute the exact same query several times at once? I.e., do I have to limit this from the frontend, or is it a backend problem?
Any ideas on how to debug what exactly is going on?
// database.js
var mysql = require('mysql');

var pool = mysql.createPool({
    connectionLimit: 100,
    host     : 'localhost',
    user     : 'secret',
    password : 'secret',
    database : 'mydb'
});

exports.getConnection = function(callback) {
    pool.getConnection(function(err, connection) {
        callback(err, connection);
    });
};

// dbrw.js
var con = require('../config/database');

function read(id, done) {
    con.getConnection(function(err, connection) {
        if (!err) {
            connection.query("SELECT * FROM users WHERE id = ?", [id], function(err, rows) {
                connection.release();
                if (err)
                    done(err);
                if (rows.length) {
                    console.log("rows " + JSON.stringify(rows));
                    done(rows[0].progress);
                };
            });
        }
        else {
            console.log(err);
        }
    });
}

exports.read = read;

// routes.js
var dbrw = require('./dbrw.js');

app.get('/read', isLoggedIn, function(req, res) {
    dbrw.read(req.user.id, function(result) {
        console.log(result);
    });
});

// Frontend - angular app.js
$scope.tryread = function() {
    $http.get('/read');
}
Thanks in advance for any input.
I see a few issues:
function read(id, done) {
    con.getConnection(function(id, connection) {...});
}
Notice how you overwrite the id passed to read by giving that same name to an argument of the callback to getConnection.
Also, your Express route doesn't actually end the request by sending back a response, which will make your browser time out the connection. At some point, it will even refuse to send more requests because too many are still pending.
So make sure to end the request:
app.get('/read', isLoggedIn, function(req, res) {
    dbrw.read(req.user.id, function(result) {
        console.log(result);
        res.end(); // or `res.send(result)`
    });
});
And a tip: you should use the standard Node callback convention, where the first argument represents an error (if there is any) and the second argument represents the return value.
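A rough sketch of read and the route rewritten with that convention (same query and progress field as in the question; treat it as an outline rather than a drop-in replacement):
function read(id, done) {
    con.getConnection(function(err, connection) {
        if (err) return done(err);
        connection.query("SELECT * FROM users WHERE id = ?", [id], function(err, rows) {
            connection.release();
            if (err) return done(err);
            // Error-first convention: null signals "no error".
            done(null, rows.length ? rows[0].progress : null);
        });
    });
}

// routes.js
app.get('/read', isLoggedIn, function(req, res) {
    dbrw.read(req.user.id, function(err, result) {
        if (err) return res.status(500).end();
        res.send(String(result)); // end the request with the result
    });
});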