The situation is: In one http GET request I need to select from one table the information I need and send to the client, and at the same time I need to retrieve the user IP and insert into a database. I'm using Node.js for this experiment.
The thing is: Is there a way to make the two actions together? Or do I have to connect and make two separate queries? Is there a way to render the page and do the other INSERT action in the background? What is the fastest option?
// Illustrative sketch: answer the GET with the SELECT result, and fire the
// IP-logging INSERT independently so the response never waits on the write.
app.get('/', function (req, res) {
  connect.query('SELECT column1, column2 FROM table;', function (err, rows) {
    // render the page as soon as the read finishes
    res.render('index', { rows: rows });
  });
  // fire-and-forget: the INSERT runs in the background, in parallel
  connect.query('INSERT INTO table2 SET ip=11111111;');
});
The stored-procedure approach suggested by @skv is nice, but you still have to wait for the write to finish before doing the read and eventually returning a result to the user.
I would argue for another approach.
Queue the ip-address and a timestamp internally in something like an array or list.
Do the read from the database and return a result to the user
Create a background job that will nibble of the internal array and do the inserts
This has several benefits
The user gets a result faster
The writes can be done later if the system is being called in bursts
The writes can be done in batches of tens or hundreds of inserts reducing the time it takes to write one row.
You can make a stored procedure do this
Basically these are two different operations, but doing it in stored procedures might give you the assurance that it will surely happen, you can pass the IP address as the parameter into the stored procedure, this will also avoid any worries of performance in the code for you as the db takes care of insert, please remember that any select that does not insert into a table or a variable will produce a result set for you to use, hope this helps.
DELIMITER $
-- Logs the caller's IP, then returns the rows the client asked for.
-- (The SELECT at the end produces the result set for the caller.)
CREATE PROCEDURE AddIPandReturnInfo
(
    IN p_IPAddress VARCHAR(20)
)
BEGIN
    INSERT INTO Yourtable (IPAddress) VALUES (p_IPAddress);
    SELECT * FROM Tablename;
END $
DELIMITER ;
Well, I assume you're using this module https://github.com/felixge/node-mysql
The MySQL protocol is sequential, so to execute parallel queries against MySQL you need multiple connections. You can use a Pool to manage the connections (built into the module).
Example:
const mysql = require('mysql');

// Shared pool: connections are created lazily, up to connectionLimit
// (the module's default limit is 10).
const poolConfig = {
  host: 'example.org',
  user: 'bob',
  password: 'secret',
  connectionLimit: 5,
};
const pool = mysql.createPool(poolConfig);
// GET / — read path and write path run in parallel on separate pooled
// connections, so the client's response never waits on the IP logging.
app.get('/', function (req, res) {
  // Read path: SELECT, then render.
  pool.getConnection(function (err, connection) {
    if (err) {
      // no connection available — fail the request instead of crashing
      return res.status(500).send('Database unavailable');
    }
    connection.query('SELECT something FROM table1', function (err, rows) {
      // return the connection to the pool in every outcome
      connection.release();
      if (err) {
        return res.status(500).send('Query failed');
      }
      // Do something with the mysql response and end the client response.
      res.render('index', { rows: rows });
    });
  });

  // Write path: best-effort IP logging in the background.
  // Prefer x-forwarded-for when behind a proxy; fall back to the socket
  // address. NOTE(review): x-forwarded-for is client-controlled — sanitize
  // it if this value is ever trusted.
  var userIp = req.headers['x-forwarded-for'] || req.connection.remoteAddress || null;
  if (userIp) {
    pool.getConnection(function (err, connection) {
      if (err) {
        return; // drop the write silently; logging must not break the page
      }
      connection.query('INSERT INTO table2 SET ip=?', [userIp], function (err) {
        connection.release(); // done with the insert
      });
    });
  }
});
Related
I have used one single MySQL connection object in a Node.js script to serve multiple users.
I mean to say that MySQL connection will be created upon starting the script & it will remain same until the life of the node script/server.
Practically, this is possible to do & i have done the same. Please take a look at below code of NodeJS/MySQL script.
#################################
// Process-lifetime setup: one MySQL connection shared by every request.
var http = require('http');
var mysql = require('mysql');
var con = mysql.createConnection({
host: "192.168.1.105",
user: "root",
password: "XXXXXX",
database: "mydb"
});
// Connect once at startup. The HTTP server is only started after the
// connection succeeds, so handlers can assume `con` is usable.
// NOTE(review): there is no reconnect logic — if MySQL drops this
// connection, every later query will fail until the process restarts.
con.connect(function(err) {
if (err) {
console.error('error: ' + err.message);
process.exit(1);
}
// All requests funnel through continueExecution on port 8082.
http.createServer(function (req, res) {
continueExecution(req,res);
}).listen(8082);
});
// Handles one request on the single shared connection `con`: queues 50
// INSERTs and 50 UPDATEs, streams progress lines, and ends the response
// after the last UPDATE callback has run.
async function continueExecution(req, res) {
  res.write('calledddd\n');
  // Queue 50 inserts; the single connection executes them sequentially.
  for (let step = 0; step < 50; step++) {
    // NOTE(review): values are interpolated straight into the SQL string.
    // Safe here only because `bar` is program-generated — use placeholders
    // (con.query(sql, [values], ...)) for any user-supplied data.
    var bar = `Company Inc ${step}`;
    var sql = `INSERT INTO customers (name, address) VALUES ('${bar}', 'Highway 37')`;
    res.write(sql + "\n");
    con.query(sql, function (err, result) {
      if (err) throw err;
      res.write("1 record inserted\n");
    });
  }
  res.write('reached\n');
  // Count completed updates. The original compared the loop variable
  // against 50, which never matched (it only reaches 49), so res.end()
  // was never called and the response hung forever.
  let completed = 0;
  for (let ste = 0; ste < 50; ste++) {
    res.write('started Update\n');
    var name = `Company Inc ${ste}`;
    var updateSql = `UPDATE customers SET name = 'UPDATE RECORD' WHERE name = '${name}'`;
    con.query(updateSql, function (err, result) {
      if (err) throw err;
      res.write(result.affectedRows + " record(s) updated\n");
      completed++;
      if (completed === 50) {
        // All updates done — finish the response. (writeHead was removed:
        // headers were already implicitly sent by the first res.write.)
        res.write('Database connected\n');
        res.end();
      }
    });
  }
}
#################################
I have several questions in my mind as i am technical expert. But i didn't find any resources over my questions. Please help me on this
Q1. Are there any type of consequences of using one single MySQL connection to provide response to multiple users?
Q2. Let's take an example.
100 users wants to access table name "users_data" at the same time. 25 users are updating their records on the same table with unique primary key. 50 users are selecting their records. another 25 users deleting their records.
All these operations are being done at the same time via parallel Node Script calls from remote device.
To complete all these MySQL transactions, system is using only 1 database connection.
What will happen in this case?
To answer your questions, one of the consequences of using a unique connection is that it can lead to slower request execution.
In fact, even if node will make the requests asynchronously, your database will execute all those requests synchronously, so one after the other in the order they came. As node makes the requests asynchronously, the order in which they are executed by your database is not granted, and the issue you are referencing to might happen.
One easy way to avoid this is to use a connection pool which will create a given number of connection, using the same db user. Here are some links that might help you with this :
using a connection pool with node.js
connect a mysql database with node.js
I am trying to use async await with mysql2 and pooling but I think I doing things wrong. Below is my code (as I said, I am not really sure if I am doing things right here).
// mysql2 connection pool, plus a promise-wrapped handle so queries can be
// awaited instead of using callbacks.
const poolOptions = {
  host: 'localhost',
  user: 'root',
  database: 'test',
  password: '',
  waitForConnections: true,
  connectionLimit: 10,
  queueLimit: 0,
};

const pool = mysql.createPool(poolOptions);
const promisePool = pool.promise();
// POST /api/register — inserts two users. The second INSERT only runs if
// the first succeeded: await throws on a MySQL error (e.g. duplicate email
// against the UNIQUE column) and control jumps to the catch block.
// The handler itself is async now — the inner wrapper function was
// unnecessary.
app.post('/api/register', async (req, res) => {
  try {
    // mysql2's promise execute() resolves to a [result, fields] tuple;
    // destructure it so addUser is the ResultSetHeader and
    // addUser.insertId is directly reachable.
    const [addUser] = await promisePool.execute(
      "INSERT INTO users (company, email, password) VALUES (?,?,?)",
      ['Ikea', 'Ikea#ikea.com', '123'],
    );
    const [addAnother] = await promisePool.execute(
      "INSERT INTO users (company, email, password) VALUES (?,?,?)",
      ['Google', 'Google#google.com', '123'],
    );
    console.log(addUser); // ResultSetHeader — includes insertId
    res.status(200).json('Users saved');
  } catch (err) {
    console.log(err);
    res.status(409).json('User already exist');
  }
});
The idea is that "addUser" should be saved, and if it is not unique, an error will occur (because the SQL database email column is set to unique) and then addAnother won't start inserting.
What I can't understand is how to get the insertId from the addUser insert? I will need it for the second insert. If I console.log addUser I can see an object like this:
ResultSetHeader {
fieldCount: 0,
affectedRows: 1,
insertId: 72,
info: '',
serverStatus: 2,
warn
So insertId is sent back to me, but I can't seem to reach it. If I try to grab it, I get "cannot read property of undefined". I feel I am doing this all wrong — how should I do it instead to get the insertId?
It seems like the result was in an array. I solved it with: addUser[0].insertId. I never got LAST_INSERT_ID() to work though.
If you insert multiple rows into the same table, and the table has an AUTO_INCREMENT primary key style column, the server assigns the values for you. You don't need to know the insertId of the first row you insert to get the second row inserted correctly.
With mysql2's promise API, execute() resolves to a [result, fields] tuple, so destructure it first (const [addUser] = await promisePool.execute(...)); the insertId you need is then available as addUser.insertId.
// mysql2's promise execute() resolves to a [result, fields] tuple, so
// destructure it — the ResultSetHeader (which carries insertId) is the
// first element. Without destructuring, addUser.insertId is undefined.
const [addUser] = await promisePool.execute(
  "INSERT INTO users (company, email, password) VALUES (?,?,?)",
  ['Ikea', 'Ikea#ikea.com', '123'],
)
const theInsertIdForAddUser = addUser.insertId
But notice that it also can be retrieved directly in the next MySQL statement via the SQL LAST_INSERT_ID() function. That lets you do an operation like this.
-- `key` is a reserved word in MySQL and must be backtick-quoted, or the
-- statement fails with a syntax error.
INSERT INTO users_properties
  (`key`, value, userId)
VALUES ('tel', '555-1212', LAST_INSERT_ID());
You don't have to send the value back.
insertId: 72
You have it in you response.
Just wanted to say, the reason why you couldnt get LAST_INSERT_ID() to work is because you are running your query directly from the connection pool.
So the pool will allocate one of it's connections to run your INSERT statement, but then it will allocate another connection to run your LAST_INSERT_ID() query. The second connection can't see the first connection's last inserted id.
Connection pools are great for performance and for making sure you never go over your hosting provider's max db connections limit...
So the solution for those times when you do need LAST_INSERT_ID(), or any time you need to run dependant statements one after the other, all on the same connection, is to use the pool.getConnection() -> connection.query() -> connection.release() code flow.
I'm trying to get the smallest ID (by world) that is not used via this SQL query:
"SELECT MAX(`objects`.`id`) as nextID FROM `objects` WHERE `objects`.`world`='1'"
NodeJS:
const mysql = require('mysql');

// Pool of up to 10 connections (credentials redacted in the post).
const pool = mysql.createPool({
  host: '********',
  user: '*********',
  password: '*******',
  database: databaseName,
  connectionLimit: 10,
});
// Looks up the highest object id already used in the given world and hands
// it to cb as cb(err, nextID). nextID is null when the world has no rows
// (MAX() over an empty set yields NULL). The original never invoked cb at
// all, and interpolated worldID straight into the SQL string.
function getNextObjectID(worldID, cb) {
  // Placeholder instead of string concatenation — avoids SQL injection
  // and lets the driver handle quoting/escaping of worldID.
  var q = "SELECT MAX(`objects`.`id`) as nextID FROM `objects` WHERE `objects`.`world`=?";
  pool.query(q, [worldID], function (err, results, fields) {
    if (err) {
      return cb(err);
    }
    // MAX() always produces exactly one row.
    cb(null, results[0].nextID);
  });
}
Previously, I had a more in depth approach that included ids used previously, but it also was having this issue so I've reverted down to this simpler method.
I run this through node and phpmyadmin. When node is doing it, it automatically inserts the world id (and yes I print out the actual query and get that it is identical upon execution). When phpmyadmin executes it returns 14. When node executes its rarely 14 and most of the time null. I have no idea why it would change. All other queries behave normally.
There was an asynchronous delete being called by someone else, it was reading next id before a large amount of rows got inserted.
I am trying to use Redis to store a list of users along with whether each one is online or offline, and to display that information to other users.
I am fairly new to node and I believe that I need to use either a list or sorted sets.
when it gets to the console.log(reply); line it only shows "Object"
I think I need to loop through the results of the query to build the list but I am not really sure 1) how to loop through the results directly in the server application and 2) how to build the list or sorted set based on that query.
Any advice or suggestions would be greatly appreciated.
var mysql = require('mysql');
var connection = mysql.createConnection({
  host     : 'localhost',
  user     : 'root',
  password : 'password',
  database : 'users'
});
var redis = require('redis')
  , client = redis.createClient();

connection.connect();

connection.query('SELECT * FROM user_profile', function(err, rows, fields) {
  if (err) throw err;
  // Redis values are strings: passing the row object directly would coerce
  // it to "[object Object]". Serialize it so the data survives the round
  // trip.
  client.set('string key', JSON.stringify(rows[0]), redis.print);
  client.get("string key", function (err, reply) {
    // reply is the JSON string; parse to recover the row object
    console.log(JSON.parse(reply));
  });
});

// end() waits for the queued query above to finish before closing, so it
// is safe to call here even though the query callback runs later.
connection.end();
1) I assume rows contains an array of objects, each object representing a user data record.
client.set('string key', rows[0], redis.print);
is storing the whole first object of rows array, you can use a foreach statement to loop over all values returned.
You are saving the whole object in redis, but you only need the online/offline state 1 or 0. Besides, you can store only strings in redis keys (see Redis Keys Docs and Redis Set Docs)
2) You don't need a list or sorted sets only for online/offline state of a user, unless you need some sorting operations later.
You can use simple keys. I suggest a pattern like "user:&lt;username&gt;" for the key name.
// assuming that user_name property exists, holds username data "david" and it's unique
client.set("user:"+row[0].user_name, 0, redis.print); // stores key "user:david" = "0";`
Then to retrieve it use:
client.get("user:"+row[0].user_name);
So, your sql query callback function could look like this:
function(err, rows, fields) {
if (err) throw err;
rows.forEach(function(element, index, array){
client.set('user:'+element.user_name, 0, redis.print);
client.get("user:"+element.user_name, function (err, reply) {
console.log(reply);
});
});
}
Please note that the user name must be unique. You can use user ID's if not
I am using node-mysql in my project. I have a scenario where the execution of the second query will be based on the result returned from the first. For example, first query, select * from table where user=x; then based on the existence I will run another query. I am using pool.getConnection(). May I know whether I should create two connections or use one connection?
Option a)
// Option a: run the first query on its own pooled connection and release
// it; the dependent second query would need a second getConnection() call
// after the result is available.
pool.getConnection(function(err, connection) {
// Use the connection
connection.query( 'SELECT something FROM sometable', function(err, rows) {
//store the result here as a var a;
connection.release();
});
});
//check var a and start another connection to run the second query?
Option b)
// Option b: both queries share one pooled connection; the second query is
// issued from inside the first query's callback, so it can depend on the
// first result.
pool.getConnection(function(err, connection) {
// Use the connection
connection.query( 'SELECT something FROM sometable', function(err, rows) {
//...based on the result,
// NOTE(review): `second query` is pseudo-code. release() here runs right
// after the second query is queued — node-mysql permits that, but the
// safer pattern is to call release() inside the second query's callback.
connection.query(second query)
connection.release();
});
});
Thanks
Hammer
Both options are using a pool, so you're already using multiple connections. So either option doesn't really matter, the only difference might be that the first option may result in the second query not being executed right away if the entire pool is in use after the first query is done.